Skip to content

Commit e514282

Browse files
author
Benjamin Cance
committed
Reorganize the CSV header and the CSV writer function for clearer structure and better formatting.
1 parent 91c6ff0 commit e514282

File tree

2 files changed

+91
-30
lines changed

2 files changed

+91
-30
lines changed

src/analyzeMFT/constants.py

Lines changed: 65 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -270,17 +270,70 @@
270270
# MFT Record magic number
271271
MFT_RECORD_MAGIC = b'FILE'
272272

273-
# CSV Header
274273
CSV_HEADER = [
275-
'Record Number', 'Good', 'Active', 'Record type', 'Sequence Number',
276-
'Parent File Rec. #', 'Parent File Rec. Seq. #', 'Filename',
277-
'Std Info Creation Time', 'Std Info Modification Time',
278-
'Std Info Access Time', 'Std Info Entry Time',
279-
'FN Info Creation Time', 'FN Info Modification Time',
280-
'FN Info Access Time', 'FN Info Entry Time',
281-
'Object ID', 'Birth Volume ID', 'Birth Object ID', 'Birth Domain ID',
282-
'Standard Information', 'Attribute List', 'File Name',
283-
'Volume Name', 'Volume Info', 'Data', 'Index Root',
284-
'Index Allocation', 'Bitmap', 'Reparse Point', 'EA Information', 'EA',
285-
'Logged Utility Stream', 'Filepath'
274+
# Basic Record Information
275+
'Record Number',
276+
'Record Status', # Instead of 'Good'/'Bad'
277+
'Record Type', # Instead of 'Active'/'Inactive'
278+
'File Type', # Instead of 'Record type'
279+
'Sequence Number',
280+
'Parent Record Number',
281+
'Parent Record Sequence Number',
282+
283+
# File Information
284+
'Filename',
285+
'Filepath',
286+
287+
# Standard Information Times
288+
'SI Creation Time',
289+
'SI Modification Time',
290+
'SI Access Time',
291+
'SI Entry Time',
292+
293+
# File Name Attribute Times
294+
'FN Creation Time',
295+
'FN Modification Time',
296+
'FN Access Time',
297+
'FN Entry Time',
298+
299+
# Object ID Information
300+
'Object ID',
301+
'Birth Volume ID',
302+
'Birth Object ID',
303+
'Birth Domain ID',
304+
305+
# Attribute Presence Flags
306+
'Has Standard Information',
307+
'Has Attribute List',
308+
'Has File Name',
309+
'Has Volume Name',
310+
'Has Volume Information',
311+
'Has Data',
312+
'Has Index Root',
313+
'Has Index Allocation',
314+
'Has Bitmap',
315+
'Has Reparse Point',
316+
'Has EA Information',
317+
'Has EA',
318+
'Has Logged Utility Stream',
319+
320+
# Detailed Attribute Information
321+
'Attribute List Details',
322+
'Security Descriptor',
323+
'Volume Name',
324+
'Volume Information',
325+
'Data Attribute',
326+
'Index Root',
327+
'Index Allocation',
328+
'Bitmap',
329+
'Reparse Point',
330+
'EA Information',
331+
'EA',
332+
'Logged Utility Stream',
333+
334+
# Hash Information (if computed)
335+
'MD5',
336+
'SHA256',
337+
'SHA512',
338+
'CRC32'
286339
]

src/analyzeMFT/mft_record.py

Lines changed: 26 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -386,39 +386,45 @@ def parse_logged_utility_stream(self, offset):
386386
def to_csv(self):
387387
row = [
388388
self.recordnum,
389-
"Good" if self.magic == int.from_bytes(MFT_RECORD_MAGIC, BYTE_ORDER) else "Bad",
390-
"Active" if self.flags & FILE_RECORD_IN_USE else "Inactive",
389+
"Valid" if self.magic == int.from_bytes(MFT_RECORD_MAGIC, BYTE_ORDER) else "Invalid",
390+
"In Use" if self.flags & FILE_RECORD_IN_USE else "Not in Use",
391391
self.get_file_type(),
392392
self.seq,
393-
self.parent_ref,
394-
self.base_ref >> 48, # Parent File Rec. Seq. #
393+
self.get_parent_record_num(),
394+
self.base_ref >> 48,
395+
395396
self.filename,
397+
"", # Filepath (to be filled later)
398+
396399
self.si_times['crtime'].dtstr,
397400
self.si_times['mtime'].dtstr,
398401
self.si_times['atime'].dtstr,
399402
self.si_times['ctime'].dtstr,
403+
400404
self.fn_times['crtime'].dtstr,
401405
self.fn_times['mtime'].dtstr,
402406
self.fn_times['atime'].dtstr,
403407
self.fn_times['ctime'].dtstr,
408+
404409
self.object_id,
405410
self.birth_volume_id,
406411
self.birth_object_id,
407412
self.birth_domain_id,
408-
"True" if STANDARD_INFORMATION_ATTRIBUTE in self.attribute_types else "False",
409-
"True" if ATTRIBUTE_LIST_ATTRIBUTE in self.attribute_types else "False",
410-
"True" if FILE_NAME_ATTRIBUTE in self.attribute_types else "False",
411-
"True" if VOLUME_NAME_ATTRIBUTE in self.attribute_types else "False",
412-
"True" if VOLUME_INFORMATION_ATTRIBUTE in self.attribute_types else "False",
413-
"True" if DATA_ATTRIBUTE in self.attribute_types else "False",
414-
"True" if INDEX_ROOT_ATTRIBUTE in self.attribute_types else "False",
415-
"True" if INDEX_ALLOCATION_ATTRIBUTE in self.attribute_types else "False",
416-
"True" if BITMAP_ATTRIBUTE in self.attribute_types else "False",
417-
"True" if REPARSE_POINT_ATTRIBUTE in self.attribute_types else "False",
418-
"True" if EA_INFORMATION_ATTRIBUTE in self.attribute_types else "False",
419-
"True" if EA_ATTRIBUTE in self.attribute_types else "False",
420-
"True" if LOGGED_UTILITY_STREAM_ATTRIBUTE in self.attribute_types else "False",
421-
"", # Filepath
413+
414+
str(STANDARD_INFORMATION_ATTRIBUTE in self.attribute_types),
415+
str(ATTRIBUTE_LIST_ATTRIBUTE in self.attribute_types),
416+
str(FILE_NAME_ATTRIBUTE in self.attribute_types),
417+
str(VOLUME_NAME_ATTRIBUTE in self.attribute_types),
418+
str(VOLUME_INFORMATION_ATTRIBUTE in self.attribute_types),
419+
str(DATA_ATTRIBUTE in self.attribute_types),
420+
str(INDEX_ROOT_ATTRIBUTE in self.attribute_types),
421+
str(INDEX_ALLOCATION_ATTRIBUTE in self.attribute_types),
422+
str(BITMAP_ATTRIBUTE in self.attribute_types),
423+
str(REPARSE_POINT_ATTRIBUTE in self.attribute_types),
424+
str(EA_INFORMATION_ATTRIBUTE in self.attribute_types),
425+
str(EA_ATTRIBUTE in self.attribute_types),
426+
str(LOGGED_UTILITY_STREAM_ATTRIBUTE in self.attribute_types),
427+
422428
str(self.attribute_list),
423429
str(self.security_descriptor),
424430
self.volume_name,
@@ -434,6 +440,8 @@ def to_csv(self):
434440
]
435441
if self.md5 is not None:
436442
row.extend([self.md5, self.sha256, self.sha512, self.crc32])
443+
else:
444+
row.extend([""] * 4) # Add empty strings for hash fields if not computed
437445
return row
438446

439447
def compute_hashes(self):

0 commit comments

Comments
 (0)