diff --git a/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java b/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
index 74092f998122251e7eb74e2a70b574c2a5b85488..eb4e8a5535bfb7c592e42240118589789daa6af5 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
@@ -21,6 +21,8 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.RandomAccessFile;
+import java.sql.SQLException;
+import java.sql.Statement;
 import java.text.MessageFormat;
 import java.util.ArrayList;
 import java.util.List;
@@ -30,6 +32,7 @@
 import java.util.TimeZone;
 import java.util.logging.Level;
 import java.util.logging.Logger;
+import static org.sleuthkit.datamodel.SleuthkitCase.closeStatement;
 import org.sleuthkit.datamodel.TskData.FileKnown;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
@@ -71,11 +74,14 @@ public abstract class AbstractFile extends AbstractContent {
 	 * knownState status in database
 	 */
 	protected TskData.FileKnown knownState;
+	private boolean knownStateDirty = false;
 	/*
 	 * md5 hash
 	 */
 	protected String md5Hash;
+	private boolean md5HashDirty = false;
 	private String mimeType;
+	private boolean mimeTypeDirty = false;
 	private static final Logger logger = Logger.getLogger(AbstractFile.class.getName());
 	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
 	private long dataSourceObjectId;
@@ -161,7 +167,7 @@ public abstract class AbstractFile extends AbstractContent {
 		}
 		this.parentPath = parentPath;
 		this.mimeType = mimeType;
-		this.extension = extension == null?"":extension;
+		this.extension = extension == null ? "" : extension;
 		this.encodingType = TskData.EncodingType.NONE;
 	}
@@ -430,10 +436,14 @@ public String getMIMEType() {
 	/**
 	 * Sets the MIME type for this file.
 	 *
-	 * @param mimeType The mimeType to set for this file.
+	 * IMPORTANT: The MIME type is set for this AbstractFile object, but it is
+	 * not saved to the case database until AbstractFile.save is called.
+	 *
+	 * @param mimeType The MIME type of this file.
 	 */
-	void setMIMEType(String mimeType) {
+	public void setMIMEType(String mimeType) {
 		this.mimeType = mimeType;
+		this.mimeTypeDirty = true;
 	}
 
 	public boolean isModeSet(TskData.TSK_FS_META_MODE_ENUM mode) {
@@ -441,14 +451,16 @@ public boolean isModeSet(TskData.TSK_FS_META_MODE_ENUM mode) {
 	}
 
 	/**
-	 * Sets md5 hash string Note: database or other FsContent objects are not
-	 * updated. Currently only SleuthkiCase calls it to update the object while
-	 * updating tsk_files entry
+	 * Sets the MD5 hash for this file.
+	 *
+	 * IMPORTANT: The MD5 hash is set for this AbstractFile object, but it is
+	 * not saved to the case database until AbstractFile.save is called.
 	 *
-	 * @param md5Hash
+	 * @param md5Hash The MD5 hash of the file.
 	 */
-	void setMd5Hash(String md5Hash) {
+	public void setMd5Hash(String md5Hash) {
 		this.md5Hash = md5Hash;
+		this.md5HashDirty = true;
 	}
 
 	/**
@@ -461,14 +473,16 @@ public String getMd5Hash() {
 	}
 
 	/**
-	 * Sets knownState status Note: database or other file objects are not
-	 * updated. Currently only SleuthkiCase calls it to update the object while
-	 * updating tsk_files entry
+	 * Sets the known state for this file.
 	 *
-	 * @param known
+	 * IMPORTANT: The known state is set for this AbstractFile object, but it is
+	 * not saved to the case database until AbstractFile.save is called.
+	 *
+	 * @param knownState The known state of the file.
 	 */
-	void setKnown(TskData.FileKnown known) {
-		this.knownState = known;
+	public void setKnown(TskData.FileKnown knownState) {
+		this.knownState = knownState;
+		this.knownStateDirty = true;
 	}
@@ -1023,9 +1037,6 @@ public String toString(boolean preserveState) {
 				+ "]\t";
 	}
 
-
-
-
 	/**
 	 * Possible return values for comparing a file to a list of mime types
 	 */
@@ -1054,6 +1065,57 @@ public MimeMatchEnum isMimeType(SortedSet<String> mimeTypes) {
 		return MimeMatchEnum.FALSE;
 	}
 
+	/**
+	 * Saves the editable file properties of this file to the case database,
+	 * e.g., the MIME type, MD5 hash, and known state.
+	 *
+	 * @throws TskCoreException if there is an error saving the editable file
+	 *                          properties to the case database.
+	 */
+	public void save() throws TskCoreException {
+
+		// No fields have been updated
+		if (!(md5HashDirty || mimeTypeDirty || knownStateDirty)) {
+			return;
+		}
+
+		String queryStr = "";
+		if (mimeTypeDirty) {
+			queryStr = "mime_type = '" + this.getMIMEType() + "'";
+		}
+		if (md5HashDirty) {
+			if (!queryStr.isEmpty()) {
+				queryStr += ", ";
+			}
+			queryStr += "md5 = '" + this.getMd5Hash() + "'";
+		}
+		if (knownStateDirty) {
+			if (!queryStr.isEmpty()) {
+				queryStr += ", ";
+			}
+			queryStr += "known = '" + this.getKnown().getFileKnownValue() + "'";
+		}
+
+		queryStr = "UPDATE tsk_files SET " + queryStr + " WHERE obj_id = " + this.getId();
+
+		SleuthkitCase.CaseDbConnection connection = getSleuthkitCase().getConnection();
+		Statement statement = null;
+
+		try {
+			statement = connection.createStatement();
+			connection.executeUpdate(statement, queryStr);
+
+			md5HashDirty = false;
+			mimeTypeDirty = false;
+			knownStateDirty = false;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error saving properties for file (obj_id = %s)", this.getId()), ex);
+		} finally {
+			closeStatement(statement);
+			connection.close();
+		}
+	}
+
 	/**
 	 * Initializes common fields used by AbstactFile implementations (objects in
 	 * tsk_files table)
@@ -1178,43 +1240,42 @@ public short getAttrId() {
 	protected void setLocalPath(String localPath, boolean isAbsolute) {
 		setLocalFilePath(localPath, isAbsolute);
 	}
-	
+
 	/*
 	 * -------------------------------------------------------------------------
 	 * Util methods to convert / map the data
 	 * -------------------------------------------------------------------------
 	 */
-	
 	/**
 	 * Return the epoch into string in ISO 8601 dateTime format
 	 *
 	 * @param epoch time in seconds
 	 *
	 * @return formatted date time string as "yyyy-MM-dd HH:mm:ss"
-	 * 
+	 *
 	 * @deprecated
 	 */
-	@Deprecated 
+	@Deprecated
 	public static String epochToTime(long epoch) {
 		return TimeUtilities.epochToTime(epoch);
 	}
 
 	/**
-	 * Return the epoch into string in ISO 8601 dateTime format,
-	 * in the given timezone
+	 * Return the epoch into string in ISO 8601 dateTime format, in the given
+	 * timezone
 	 *
 	 * @param epoch time in seconds
 	 * @param tzone time zone
 	 *
 	 * @return formatted date time string as "yyyy-MM-dd HH:mm:ss"
-	 * 
+	 *
 	 * @deprecated
 	 */
 	@Deprecated
 	public static String epochToTime(long epoch, TimeZone tzone) {
 		return TimeUtilities.epochToTime(epoch, tzone);
 	}
-	
+
 	/**
 	 * Convert from ISO 8601 formatted date time string to epoch time in seconds
 	 *
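Note (illustration, not part of the patch): a minimal usage sketch of the new editable-property API above. It assumes the caller has already obtained a SleuthkitCase and an AbstractFile, for example via SleuthkitCase.openCase and getAbstractFileById; the case path, object id, and MIME type are placeholders. The setters only modify the in-memory object and mark it dirty; save() then writes the dirty properties to tsk_files in a single UPDATE and does nothing if no property changed.

    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.SleuthkitCase;
    import org.sleuthkit.datamodel.TskCoreException;
    import org.sleuthkit.datamodel.TskData;

    public class SaveExample {
        public static void main(String[] args) throws TskCoreException {
            SleuthkitCase caseDb = SleuthkitCase.openCase("/cases/example/case.db"); // placeholder path
            AbstractFile file = caseDb.getAbstractFileById(42);                      // placeholder object id

            // Each setter updates this object only and sets its dirty flag.
            file.setMIMEType("application/pdf");
            file.setKnown(TskData.FileKnown.KNOWN);

            // Persists the dirty properties to tsk_files; a no-op if nothing changed.
            file.save();

            caseDb.close();
        }
    }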
diff --git a/bindings/java/src/org/sleuthkit/datamodel/HashUtility.java b/bindings/java/src/org/sleuthkit/datamodel/HashUtility.java
index b25700e5c3a8d501d2dc651991096f170914f0b1..90d65f2ad1239022e77b4f99805393c3b0d55b96 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/HashUtility.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/HashUtility.java
@@ -32,18 +32,18 @@ public class HashUtility {
 
 	private final static int BUFFER_SIZE = 16 * 1024;
-	
+
 	/**
-	 * Calculate the MD5 hash for the given FsContent and store it in the
-	 * database
+	 * Calculate the MD5 hash for the given FsContent
 	 *
-	 * @param file file object whose md5 hash we want to calculate
+	 * @param content content object whose md5 hash we want to calculate
 	 *
 	 * @return md5 of the given FsContent object
+	 * @throws java.io.IOException
 	 */
-	static public String calculateMd5(AbstractFile file) throws IOException {
+	static public String calculateMd5Hash(Content content) throws IOException {
 		String hashText = "";
-		InputStream in = new ReadContentInputStream(file);
+		InputStream in = new ReadContentInputStream(content);
 		Logger logger = Logger.getLogger(HashUtility.class.getName());
 		try {
 			byte[] buffer = new byte[BUFFER_SIZE];
@@ -60,11 +60,8 @@ static public String calculateMd5(AbstractFile file) throws IOException {
 			while (hashText.length() < 32) {
 				hashText = "0" + hashText;
 			}
-			file.getSleuthkitCase().setMd5Hash(file, hashText);
 		} catch (NoSuchAlgorithmException ex) {
 			logger.log(Level.WARNING, "No algorithm known as 'md5'", ex); //NON-NLS
-		} catch (TskCoreException ex) {
-			logger.log(Level.WARNING, "Error updating content's md5 in database", ex); //NON-NLS
 		} finally {
 			in.close();
 		}
@@ -83,4 +80,27 @@ static public String calculateMd5(AbstractFile file) throws IOException {
 	public static boolean isNoDataMd5(String md5) {
 		return md5.toLowerCase().equals("d41d8cd98f00b204e9800998ecf8427e"); //NON-NLS
 	}
+
+	/**
+	 * Calculate the MD5 hash for the given FsContent and store it in the
+	 * database
+	 *
+	 * @param file file object whose md5 hash we want to calculate
+	 *
+	 * @return md5 of the given FsContent object
+	 * @throws java.io.IOException
+	 *
+	 * @deprecated
+	 */
+	@Deprecated
+	static public String calculateMd5(AbstractFile file) throws IOException {
+		Logger logger = Logger.getLogger(HashUtility.class.getName());
+		String md5Hash = calculateMd5Hash(file);
+		try {
+			file.getSleuthkitCase().setMd5Hash(file, md5Hash);
+		} catch (TskCoreException ex) {
+			logger.log(Level.WARNING, "Error updating content's md5 in database", ex); //NON-NLS
+		}
+		return md5Hash;
+	}
 }
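Note (illustration, not part of the patch): with calculateMd5 deprecated, hashing and persistence are now separate steps: calculateMd5Hash only computes the hash, and the caller stores it through the new AbstractFile setters and save(). A sketch of the replacement pattern; the AbstractFile is assumed to come from the caller's own lookup.

    import java.io.IOException;
    import org.sleuthkit.datamodel.AbstractFile;
    import org.sleuthkit.datamodel.HashUtility;
    import org.sleuthkit.datamodel.TskCoreException;

    public class HashExample {
        // Replaces the deprecated HashUtility.calculateMd5(file), which wrote
        // to the database itself.
        static String hashAndStore(AbstractFile file) throws IOException, TskCoreException {
            String md5 = HashUtility.calculateMd5Hash(file); // no database write here
            file.setMd5Hash(md5);                            // marks the object dirty
            file.save();                                     // persists to tsk_files
            return md5;
        }
    }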
diff --git a/tsk/fs/ext2fs.c b/tsk/fs/ext2fs.c
index 7feec481060fea23a2078a656052fea83a982062..84ac20595403856422fba5c4993b6427ce6a625e 100644
--- a/tsk/fs/ext2fs.c
+++ b/tsk/fs/ext2fs.c
@@ -135,7 +135,7 @@ static uint8_t
         TSK_OFF_T offs;
         ssize_t cnt;
 
-        if (!gd_size)
+        if (gd_size < sizeof(ext4fs_gd))
             gd_size = sizeof(ext4fs_gd);
 
         if (ext2fs->ext4_grp_buf == NULL) {
@@ -180,7 +180,7 @@ static uint8_t
     else {
         TSK_OFF_T offs;
         ssize_t cnt;
-        if (!gd_size)
+        if (gd_size < sizeof(ext2fs_gd))
             gd_size = sizeof(ext2fs_gd);
 
         if (ext2fs->grp_buf == NULL) {
diff --git a/tsk/fs/hfs_dent.c b/tsk/fs/hfs_dent.c
index 39cbea698282faef31b5fc844f7dbd2d7b2566e0..881a197b86ee01054363564d427a0786f7dcd8d6 100644
--- a/tsk/fs/hfs_dent.c
+++ b/tsk/fs/hfs_dent.c
@@ -263,7 +263,13 @@ hfs_dir_open_meta_cb(HFS_INFO * hfs, int8_t level_type,
         info->fs_name->type = TSK_FS_NAME_TYPE_DIR;
         info->fs_name->flags = TSK_FS_NAME_FLAG_ALLOC;
-
+        // Make sure there is enough space in cur_key for the name
+        // (name is unicode so each character is two bytes; 6 bytes of non-name characters)
+        if ((uint32_t)(tsk_getu16(hfs->fs_info.endian, cur_key->name.length)) * 2 > tsk_getu16(hfs->fs_info.endian, cur_key->key_len) - 6) {
+            error_returned
+                ("hfs_dir_open_meta_cb: name length is too long");
+            return HFS_BTREE_CB_ERR;
+        }
         if (hfs_UTF16toUTF8(fs, (uint8_t *)
                 cur_key->name.unicode, tsk_getu16(hfs->fs_info.endian,
                     cur_key->name.length), info->fs_name->name,
                 HFS_MAXNAMLEN + 1,
@@ -314,6 +320,14 @@ hfs_dir_open_meta_cb(HFS_INFO * hfs, int8_t level_type,
                     file->std.perm.mode));
         }
         info->fs_name->flags = TSK_FS_NAME_FLAG_ALLOC;
+
+        // Make sure there is enough space in cur_key for the name
+        // (name is unicode so each character is two bytes; 6 bytes of non-name characters)
+        if ((uint32_t)(tsk_getu16(hfs->fs_info.endian, cur_key->name.length)) * 2 > tsk_getu16(hfs->fs_info.endian, cur_key->key_len) - 6) {
+            error_returned
+                ("hfs_dir_open_meta_cb: name length is too long");
+            return HFS_BTREE_CB_ERR;
+        }
         if (hfs_UTF16toUTF8(fs, (uint8_t *)
                 cur_key->name.unicode, tsk_getu16(hfs->fs_info.endian,
                     cur_key->name.length), info->fs_name->name, HFS_MAXNAMLEN + 1,
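Note (illustration, not part of the patch): the two checks added to hfs_dir_open_meta_cb guard the arithmetic 2 * name_length <= key_len - 6, since the name is UTF-16 (two bytes per code unit) and the catalog key carries six bytes of fixed, non-name fields. A hypothetical helper in the Java used elsewhere in this patch, purely to illustrate the bound:

    public class HfsKeyBoundsExample {
        // True when a key of keyLenBytes can hold a name of nameLenUnits
        // UTF-16 code units: 6 fixed bytes plus 2 bytes per code unit.
        static boolean nameFitsInKey(int nameLenUnits, int keyLenBytes) {
            return (long) nameLenUnits * 2 <= (long) keyLenBytes - 6;
        }

        public static void main(String[] args) {
            // A 40-byte key leaves (40 - 6) / 2 = 17 code units of room for the name.
            System.out.println(nameFitsInKey(17, 40)); // true
            System.out.println(nameFitsInKey(18, 40)); // false: the patch returns HFS_BTREE_CB_ERR
        }
    }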
diff --git a/tsk/fs/ntfs.c b/tsk/fs/ntfs.c
index 7cbbdfc7ec2786ee49b5672133485f7b2dd3baf5..1893b443157423461622c2af9d976ad54ce7fd7a 100755
--- a/tsk/fs/ntfs.c
+++ b/tsk/fs/ntfs.c
@@ -609,9 +609,22 @@ ntfs_make_data_run(NTFS_INFO * ntfs, TSK_OFF_T start_vcn,
          * these for loops are the equivalent of the getuX macros
          */
         idx = 0;
-        /* Get the length of this run */
+
+        /* Get the length of this run.
+         * A length of more than eight bytes will not fit in the
+         * 64-bit length field (and is likely corrupt)
+         */
+        if (NTFS_RUNL_LENSZ(run) > 8) {
+            tsk_error_reset();
+            tsk_error_set_errno(TSK_ERR_FS_INODE_COR);
+            tsk_error_set_errstr
+                ("ntfs_make_run: Run length is too large to process");
+            tsk_fs_attr_run_free(*a_data_run_head);
+            *a_data_run_head = NULL;
+            return TSK_COR;
+        }
         for (i = 0, data_run->len = 0; i < NTFS_RUNL_LENSZ(run); i++) {
-            data_run->len |= (run->buf[idx++] << (i * 8));
+            data_run->len |= ((uint64_t)(run->buf[idx++]) << (i * 8));
             if (tsk_verbose)
                 tsk_fprintf(stderr,
                     "ntfs_make_data_run: Len idx: %i cur: %"
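Note (illustration, not part of the patch): the new (uint64_t) cast matters because run->buf[idx] is promoted to int before the shift, so assembling the upper bytes of an up-to-eight-byte run length shifted a 32-bit value by 32 bits or more, which is undefined in C and loses the high bytes. The same pitfall exists in Java, the language of the bindings above; an illustrative little-endian assembly with the widening done before the shift (the helper name is hypothetical):

    public class RunLengthExample {
        // Assembles up to eight little-endian bytes into a long, mirroring how
        // ntfs_make_data_run builds a run length after this patch.
        static long littleEndianValue(byte[] buf, int offset, int numBytes) {
            long value = 0;
            for (int i = 0; i < numBytes; i++) {
                // Mask away sign extension, widen to long, then shift.
                // Without the widening, an int shift of 32 or more bits wraps its shift count.
                value |= ((long) (buf[offset + i] & 0xff)) << (8 * i);
            }
            return value;
        }
    }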
@@ -4809,9 +4822,11 @@ ntfs_close(TSK_FS_INFO * fs)
 #endif
 
     fs->tag = 0;
-    free((char *) ntfs->fs);
+    if(ntfs->fs)
+        free((char *) ntfs->fs);
     tsk_fs_attr_run_free(ntfs->bmap);
-    free(ntfs->bmap_buf);
+    if(ntfs->bmap_buf)
+        free(ntfs->bmap_buf);
     tsk_fs_file_close(ntfs->mft_file);
 
     if (ntfs->orphan_map)
@@ -5083,7 +5098,6 @@ ntfs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset,
     ntfs->mft_data =
         tsk_fs_attrlist_get(ntfs->mft_file->meta->attr, NTFS_ATYPE_DATA);
     if (!ntfs->mft_data) {
-        tsk_fs_file_close(ntfs->mft_file);
         tsk_error_errstr2_concat(" - Data Attribute not found in $MFT");
         if (tsk_verbose)
             fprintf(stderr,
@@ -5106,7 +5120,6 @@ ntfs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset,
 
     /* load the version of the file system */
     if (ntfs_load_ver(ntfs)) {
-        tsk_fs_file_close(ntfs->mft_file);
         if (tsk_verbose)
             fprintf(stderr,
                 "ntfs_open: Error loading file system version ((%s)\n",
@@ -5116,7 +5129,6 @@ ntfs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset,
 
     /* load the data block bitmap data run into ntfs_info */
     if (ntfs_load_bmap(ntfs)) {
-        tsk_fs_file_close(ntfs->mft_file);
         if (tsk_verbose)
             fprintf(stderr, "ntfs_open: Error loading block bitmap (%s)\n",
                 tsk_error_get());
@@ -5128,7 +5140,6 @@ ntfs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset,
 
 #if TSK_USE_SID
     if (ntfs_load_secure(ntfs)) {
-        tsk_fs_file_close(ntfs->mft_file);
         if (tsk_verbose)
             fprintf(stderr, "ntfs_open: Error loading Secure Info (%s)\n",
                 tsk_error_get());
@@ -5160,16 +5171,6 @@ ntfs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset,
     return fs;
 
 on_error:
-    if( fs != NULL ) {
-        // Since fs->tag is ntfs->fs_info.tag why is this value set to 0
-        // and the memory is freed directly afterwards?
-        fs->tag = 0;
-    }
-    if( ntfs != NULL ) {
-        if( ntfs->fs != NULL ) {
-            free( ntfs->fs );
-        }
-        free( ntfs );
-    }
+    ntfs_close(fs);
     return NULL;
 }
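Note (illustration, not part of the patch): the on_error path now simply calls ntfs_close(fs), which is why the null checks added to ntfs_close above matter: the close routine has to tolerate a partially initialized NTFS_INFO. A sketch of the same pattern in Java, purely as an analogy; the Volume type and its fields are hypothetical.

    import java.io.IOException;
    import java.io.RandomAccessFile;

    public class CleanupExample {
        static final class Volume implements AutoCloseable {
            RandomAccessFile image; // may still be null if open() failed early
            byte[] bitmap;          // may still be null if open() failed early

            @Override
            public void close() throws IOException {
                // Tolerates partial construction, like the null checks in ntfs_close.
                if (image != null) {
                    image.close();
                }
                bitmap = null;
            }
        }

        static Volume open(String path) throws IOException {
            Volume v = new Volume();
            try {
                v.image = new RandomAccessFile(path, "r");
                v.bitmap = new byte[4096]; // stands in for the remaining setup steps
                return v;
            } catch (IOException ex) {
                // Single cleanup path, like "on_error: ntfs_close(fs);" above.
                try {
                    v.close();
                } catch (IOException suppressed) {
                    ex.addSuppressed(suppressed);
                }
                throw ex;
            }
        }
    }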