@@ -386,39 +386,45 @@ def parse_logged_utility_stream(self, offset):
     def to_csv(self):
         row = [
             self.recordnum,
-            "Good" if self.magic == int.from_bytes(MFT_RECORD_MAGIC, BYTE_ORDER) else "Bad",
-            "Active" if self.flags & FILE_RECORD_IN_USE else "Inactive",
+            "Valid" if self.magic == int.from_bytes(MFT_RECORD_MAGIC, BYTE_ORDER) else "Invalid",
+            "In Use" if self.flags & FILE_RECORD_IN_USE else "Not in Use",
             self.get_file_type(),
             self.seq,
-            self.parent_ref,
-            self.base_ref >> 48,  # Parent File Rec. Seq. #
+            self.get_parent_record_num(),
+            self.base_ref >> 48,
+
             self.filename,
+            "",  # Filepath (to be filled later)
+
             self.si_times['crtime'].dtstr,
             self.si_times['mtime'].dtstr,
             self.si_times['atime'].dtstr,
             self.si_times['ctime'].dtstr,
+
             self.fn_times['crtime'].dtstr,
             self.fn_times['mtime'].dtstr,
             self.fn_times['atime'].dtstr,
             self.fn_times['ctime'].dtstr,
+
             self.object_id,
             self.birth_volume_id,
             self.birth_object_id,
             self.birth_domain_id,
-            "True" if STANDARD_INFORMATION_ATTRIBUTE in self.attribute_types else "False",
-            "True" if ATTRIBUTE_LIST_ATTRIBUTE in self.attribute_types else "False",
-            "True" if FILE_NAME_ATTRIBUTE in self.attribute_types else "False",
-            "True" if VOLUME_NAME_ATTRIBUTE in self.attribute_types else "False",
-            "True" if VOLUME_INFORMATION_ATTRIBUTE in self.attribute_types else "False",
-            "True" if DATA_ATTRIBUTE in self.attribute_types else "False",
-            "True" if INDEX_ROOT_ATTRIBUTE in self.attribute_types else "False",
-            "True" if INDEX_ALLOCATION_ATTRIBUTE in self.attribute_types else "False",
-            "True" if BITMAP_ATTRIBUTE in self.attribute_types else "False",
-            "True" if REPARSE_POINT_ATTRIBUTE in self.attribute_types else "False",
-            "True" if EA_INFORMATION_ATTRIBUTE in self.attribute_types else "False",
-            "True" if EA_ATTRIBUTE in self.attribute_types else "False",
-            "True" if LOGGED_UTILITY_STREAM_ATTRIBUTE in self.attribute_types else "False",
-            "",  # Filepath
+
+            str(STANDARD_INFORMATION_ATTRIBUTE in self.attribute_types),
+            str(ATTRIBUTE_LIST_ATTRIBUTE in self.attribute_types),
+            str(FILE_NAME_ATTRIBUTE in self.attribute_types),
+            str(VOLUME_NAME_ATTRIBUTE in self.attribute_types),
+            str(VOLUME_INFORMATION_ATTRIBUTE in self.attribute_types),
+            str(DATA_ATTRIBUTE in self.attribute_types),
+            str(INDEX_ROOT_ATTRIBUTE in self.attribute_types),
+            str(INDEX_ALLOCATION_ATTRIBUTE in self.attribute_types),
+            str(BITMAP_ATTRIBUTE in self.attribute_types),
+            str(REPARSE_POINT_ATTRIBUTE in self.attribute_types),
+            str(EA_INFORMATION_ATTRIBUTE in self.attribute_types),
+            str(EA_ATTRIBUTE in self.attribute_types),
+            str(LOGGED_UTILITY_STREAM_ATTRIBUTE in self.attribute_types),
+
             str(self.attribute_list),
             str(self.security_descriptor),
             self.volume_name,
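The new `get_parent_record_num()` helper is called here but not defined in this hunk. In NTFS, a 64-bit file reference packs a 48-bit MFT record number in the low bits and a 16-bit sequence number in the high bits, which is why `self.base_ref >> 48` yields a sequence number. A minimal sketch of what such a helper could look like, assuming it derives the record number from the existing `self.parent_ref` field (the mask constant and the exact derivation are assumptions, not taken from this change):

```python
FILE_REFERENCE_RECORD_MASK = 0x0000FFFFFFFFFFFF  # low 48 bits: MFT record number

def get_parent_record_num(self):
    # Drop the 16-bit sequence number in the high bits and keep
    # only the parent's MFT record number.
    return self.parent_ref & FILE_REFERENCE_RECORD_MASK
```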
@@ -434,6 +440,8 @@ def to_csv(self):
         ]
         if self.md5 is not None:
             row.extend([self.md5, self.sha256, self.sha512, self.crc32])
+        else:
+            row.extend([""] * 4)  # Add empty strings for hash fields if not computed
         return row
 
     def compute_hashes(self):
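The added `else` branch pads the row with four empty cells so a record without computed hashes emits the same number of columns as one with MD5/SHA256/SHA512/CRC32 values. A short usage sketch of why the fixed width matters when rows are handed to `csv.writer` (the `write_records` helper and output filename are hypothetical, not part of this change):

```python
import csv

def write_records(records, path="mft_output.csv"):  # hypothetical helper
    with open(path, "w", newline="", encoding="utf-8") as fh:
        writer = csv.writer(fh)
        for record in records:
            # Every row now has the same width whether or not
            # compute_hashes() ran, so data cells stay aligned
            # with a single header row.
            writer.writerow(record.to_csv())
```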