diff --git a/bindings/java/doxygen/Doxyfile b/bindings/java/doxygen/Doxyfile
index 8fb716c40ffb6b92a64c0a6fc6c07fd19a7ec3b6..adb110b6d69d5c13ee309f492c7c5d8ffda6e95e 100644
--- a/bindings/java/doxygen/Doxyfile
+++ b/bindings/java/doxygen/Doxyfile
@@ -770,7 +770,6 @@ INPUT                  = main.dox \
                          schema/schema_list.dox \
                          schema/db_schema_8_6.dox \
                          schema/db_schema_9_0.dox \
-                         schema/db_schema_9_1.dox \
                          ../src
 
 # This tag can be used to specify the character encoding of the source files
diff --git a/bindings/java/doxygen/blackboard.dox b/bindings/java/doxygen/blackboard.dox
index fc51e88b9699fcfbdcc4d6603340602e1f134876..7c4288e91bf8495f96a96e76e466da2862542fe5 100644
--- a/bindings/java/doxygen/blackboard.dox
+++ b/bindings/java/doxygen/blackboard.dox
@@ -36,9 +36,9 @@ Consult the \ref artifact_catalog_page "artifact catalog" for a list of built-in
 There are may ways to create artifacts, but we will focus on creating them through the Blackboard class or directly through a Content object. Regardless of how they are created, all artifacts must be associated with a Content object. 
 
 <ul>
-<li>org.sleuthkit.datamodel.AbstractContent.newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId)
+<li>org.sleuthkit.datamodel.AbstractContent.newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, OsAccount osAccount)
 <li>org.sleuthkit.datamodel.AbstractContent.newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList)
-<li>org.sleuthkit.datamodel.Blackboard.newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId, Collection<BlackboardAttribute> attributes, Long osAccountId)
+<li>org.sleuthkit.datamodel.Blackboard.newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId, Collection<BlackboardAttribute> attributes, OsAccount osAccount)
 <li>org.sleuthkit.datamodel.Blackboard.newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score, 
 			String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList, CaseDbTransaction transaction)
 </ul>
diff --git a/bindings/java/doxygen/main.dox b/bindings/java/doxygen/main.dox
index bc31b67c566e5b2a4ef93d07a29a5abfc2f8a40f..bf9ba7cb421a08aedce21e289846825658c9efee 100644
--- a/bindings/java/doxygen/main.dox
+++ b/bindings/java/doxygen/main.dox
@@ -47,7 +47,7 @@ You can also access the data in its tree form by starting with org.sleuthkit.dat
 The Sleuth Kit has its own database schema that is shared with Autopsy and other tools. The primary way it gets populated is via the Java code. 
 
 - Database Schema Documentation:
- - \subpage db_schema_9_1_page 
+ - \subpage db_schema_9_0_page 
  - \subpage db_schema_page "Older schemas"
 - Refer to \subpage query_database_page if you are going to use one of the SleuthkitCase methods that requires you to specify a query. 
 - Refer to \subpage insert_and_update_database_page if you are a Sleuth Kit developer and want to avoid database issues.
diff --git a/bindings/java/doxygen/schema/db_schema_9_0.dox b/bindings/java/doxygen/schema/db_schema_9_0.dox
index 182b6c797bbb50f7923710491815f21d3d69df39..2e1fcaf161e25df1fa9321c729bfe6f74841b52b 100644
--- a/bindings/java/doxygen/schema/db_schema_9_0.dox
+++ b/bindings/java/doxygen/schema/db_schema_9_0.dox
@@ -1,7 +1,543 @@
 /*! \page db_schema_9_0_page TSK & Autopsy Database Schema (Schema version 9.0)
 
-Schema 9.0 is not associated with a released version of Autopsy and is almost the same as schema 9.1. 
+[TOC]
+
+# Introduction
+
+This page outlines version 9.0 of the database that is used by The Sleuth Kit and Autopsy. The goal of this page is to provide short descriptions for each table and column and not focus on foreign key requirements, etc. If you want that level of detail, then refer to the actual schema in addition to this. 
+
+Each Autopsy release is associated with a schema version with a major and minor version number. If a case with an older schema version is opened in a new version of Autopsy, the case will automatically be updated to the current schema. Going the other direction (opening a case that was created with a newer version of Autopsy), two things may happen:
+- If the case database has the same major number as the version of Autopsy being used, the case should generally be able to be opened and used.
+- If the case database has a higher major number than the version of Autopsy being used, an error will be displayed when attempting to open the case. 
+
+You can find a basic graphic of some of the table relationships <a href="https://docs.google.com/drawings/d/1omR_uUAp1fQt720oJ-kk8C48BXmVa3PNjPZCDdT0Tb4/edit?usp=sharing">here</a>
+
+
+Some general notes on this schema:
+- Nearly every type of data is assigned a unique ID, called the Object ID
+- The objects form a hierarchy that shows where data came from.  A child comes from its parent.  
+ - For example, disk images are the root, with a volume system below it, then a file system, and then files and directories. 
+- This schema has been designed to store data beyond the file system data that The Sleuth Kit supports. It can store carved files, a folder full of local files, etc.
+- The Blackboard is used to store artifacts, which contain attributes (name/value pairs).  Artifacts are used to store data types that do not have more formal tables. Module writers can make whatever artifact types they want. See \ref mod_bbpage for more details. 
+- The Sleuth Kit will make virtual files to span the unallocated space.  They will have a naming format of 'Unalloc_[PARENT-OBJECT-ID]_[BYTE-START]_[BYTE-END]'.
+
+# Schema Information
+
+This was a big change. Tables were added to support analysis results, OS accounts, hosts and person structure of data sources, and host addresses (IPs, DNS, etc.). The major component of the version number has been incremented because there are new org.sleuthkit.datamodel.TskData.ObjectType enum types (OsAccount and HostAddress). More information on how to use these new objects can be found on the \ref mod_dspage and \ref mod_os_accounts_page pages.
+
+<ul>
+<li><b>Autopsy versions: </b> Autopsy 4.19
+<li><b>Changes from version 8.6:</b>
+<ul>
+<li> New columns:
+<ul>
+<li>host_id, added_date_time, acquisition_tool_settings, acquisition_tool_name, acquisition_tool_version in data_source_info
+<li>category_type in artifact_types
+<li>owner_uid, os_account_obj_id in tsk_files
+</ul>
+<li> New tables:
+<ul>
+<li>tsk_aggregate_score
+<li>tsk_analysis_results
+<li>tsk_data_artifacts
+<li>tsk_file_attributes
+<li>tsk_hosts
+<li>tsk_host_addresses
+<li>tsk_host_address_dns_ip_map
+<li>tsk_host_address_usage
+<li>tsk_os_accounts
+<li>tsk_os_account_attributes
+<li>tsk_os_account_instances
+<li>tsk_os_account_realms
+<li>tsk_persons
+</ul>
+</ul>
+</ul>
+
+
+# General Information Tables 
+## tsk_db_info 
+Metadata about the database.
+- **schema_ver** - Major version number of the current database schema
+- **tsk_ver** - Version of TSK used to create database
+- **schema_minor_version** - Minor version number of the current database schema
+
+## tsk_db_info_extended
+Name & Value pair table to store any information about the database.  For example, which schema it was created with, etc. 
+- **name** - Any string name
+- **value** - Any string value
+
+
+# Object Tables 
+## tsk_objects 
+Every object (image, volume system, file, etc.) has an entry in this table.  This table allows you to find the parent of a given object and allows objects to be tagged and have children.  This table provides items with a unique object id.  The details of the object are in other tables.  
+- **obj_id** - Unique id 
+- **par_obj_id** - The object id of the parent object (NULL for root objects). The parent of a volume system is an image, the parent of a directory is a directory or filesystem, the parent of a filesystem is a volume or an image, etc.
+- **type** - Object type (as org.sleuthkit.datamodel.TskData.ObjectType enum)
+
+
+# Hosts / Persons
+Stores data related to hosts and persons, which can help organize data sources. 
+
+## tsk_persons
+Stores persons for the case. A person is someone who owns or used a data source in the case. 
+- **id** - Id of the person
+- **name** - Name of the person (should be human readable)
+
+## tsk_hosts
+Stores hosts that have a data source in the case. Each data source must be associated with a host.  These are NOT created for a reference to an external host (such as a web domain). 
+- **id** - Id of the host
+- **name** - Name of the host (should be human readable)
+- **db_status** - Status of the host (active/merged/deleted as org.sleuthkit.datamodel.Host.HostDbStatus)
+- **person_id** - Optional id of associated person
+- **merged_into** - Stores the host ID that this host was merged into
+
+# Data Source / Device Tables 
+## data_source_info
+Contains information about a data source, which could be an image.  This is where we group data sources into devices (based on device ID).
+- **obj_id** - Id of image/data source in tsk_objects
+- **device_id** - Unique ID (GUID) for the device that contains the data source
+- **time_zone** - Timezone that the data source was originally located in
+- **acquisition_details** - Notes on the acquisition of the data source
+- **added_date_time** - Timestamp of when the data source was added
+- **acquisition_tool_name** - Name of the tool used to acquire the image
+- **acquisition_tool_settings** - Specific settings used by the tool to acquire the image
+- **acquisition_tool_version** - Version of the acquisition tool
+- **host_id** - Host associated with this image (must be set)
+
+
+# Disk Image Tables
+
+## tsk_image_info 
+Contains information about each set of images that is stored in the database. 
+- **obj_id** - Id of image in tsk_objects
+- **type** - Type of disk image format (as org.sleuthkit.datamodel.TskData.TSK_IMG_TYPE_ENUM)
+- **ssize** - Sector size of device in bytes
+- **tzone** - Timezone where image is from (the same format that TSK tools want as input)
+- **size** - Size of the original image (in bytes) 
+- **md5** - MD5 hash of the image (for compressed data such as E01, the hashes are of the decompressed image, not the E01 itself)
+- **sha1** - SHA-1 hash of the image
+- **sha256** - SHA-256 hash of the image
+- **display_name** - Display name of the image
+
+## tsk_image_names
+Stores path(s) to file(s) on disk that make up an image set.
+- **obj_id** - Id of image in tsk_objects
+- **name** - Path to location of image file on disk
+- **sequence** - Position in sequence of image parts
+
+
+# Volume System Tables
+## tsk_vs_info
+Contains one row for every volume system found in the images.
+- **obj_id** - Id of volume system in tsk_objects
+- **vs_type** - Type of volume system / media management (as org.sleuthkit.datamodel.TskData.TSK_VS_TYPE_ENUM)
+- **img_offset** - Byte offset where VS starts in disk image
+- **block_size** - Size of blocks in bytes
+
+## tsk_vs_parts
+Contains one row for every volume / partition in the images. 
+- **obj_id** - Id of volume in tsk_objects
+- **addr** - Address of the partition
+- **start** - Sector offset of start of partition
+- **length** - Number of sectors in partition
+- **desc** - Description of partition (volume system type-specific)
+- **flags** - Flags for partition (as org.sleuthkit.datamodel.TskData.TSK_VS_PART_FLAG_ENUM)
+
+## tsk_pool_info 
+Contains information about pools (for APFS, logical disk management, etc.)
+- **obj_id** - Id of pool in tsk_objects
+- **pool_type** - Type of pool (as org.sleuthkit.datamodel.TskData.TSK_POOL_TYPE_ENUM)
+
+# File System Tables
+## tsk_fs_info
+Contains one row for every file system in the images. 
+- **obj_id** - Id of filesystem in tsk_objects
+- **data_source_obj_id** - Id of the data source for the file system
+- **img_offset** - Byte offset that filesystem starts at
+- **fs_type** - Type of file system (as org.sleuthkit.datamodel.TskData.TSK_FS_TYPE_ENUM)
+- **block_size** - Size of each block (in bytes)
+- **block_count** - Number of blocks in filesystem
+- **root_inum** - Metadata address of root directory
+- **first_inum** - First valid metadata address
+- **last_inum** - Last valid metadata address
+- **display_name** - Display name of file system (could be volume label)
+
+## tsk_files
+Contains one row for every file found in the images.  Has the basic metadata for the file. 
+- **obj_id** - Id of file in tsk_objects
+- **fs_obj_id** - Id of filesystem in tsk_objects (NULL if file is not located in a file system -- carved in unpartitioned space, etc.)
+- **data_source_obj_id** - Id of the data source for the file
+- **attr_type** - Type of attribute (as org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM)
+- **attr_id** - Id of attribute
+- **name** - Name of attribute. Will be NULL if attribute doesn't have a name.  Must not have any slashes in it. 
+- **meta_addr** - Address of the metadata structure that the name points to
+- **meta_seq** - Sequence of the metadata address
+- **type** - Type of file: filesystem, carved, etc. (as org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM enum)
+- **has_layout** - True if file has an entry in tsk_file_layout
+- **has_path** - True if file has an entry in tsk_files_path
+- **dir_type** - File type information: directory, file, etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM)
+- **meta_type** - File type (as org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM)
+- **dir_flags** -  Flags that describe allocation status etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM)
+- **meta_flags** - Flags for the file for its allocation status etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM)
+- **size** - File size in bytes
+- **ctime** - Last file / metadata status change time (stored in number of seconds since Jan 1, 1970 UTC)
+- **crtime** - Created time
+- **atime** - Last file content accessed time
+- **mtime** - Last file content modification time
+- **mode** - Unix-style permissions (as org.sleuthkit.datamodel.TskData.TSK_FS_META_MODE_ENUM)
+- **uid** - Owner id
+- **gid** - Group id
+- **md5** - MD5 hash of file contents
+- **sha256** - SHA-256 hash of file contents
+- **known** - Known status of file (as org.sleuthkit.datamodel.TskData.FileKnown)
+- **parent_path** - Full path of parent folder. Must begin and end with a '/' (Note that a single '/' is valid)
+- **mime_type** - MIME type of the file content, if it has been detected. 
+- **extension** - File extension
+- **owner_uid** - Unique ID of the owner (SID in Windows)
+- **os_account_obj_id** - ID of optional associated OS account
+
+## tsk_file_layout
+Stores the layout of a file within the image.  A file will have one or more rows in this table depending on how fragmented it was. All file types use this table (file system, carved, unallocated blocks, etc.).
+- **obj_id** - Id of file in tsk_objects
+- **sequence** - Position of the run in the file (0-based and the obj_id and sequence pair will be unique in the table)
+- **byte_start** - Byte offset of fragment relative to the start of the image file
+- **byte_len** - Length of fragment in bytes
+
+
+## tsk_files_path
+If a "locally-stored" file has been imported into the database for analysis, then this table stores its path.  Used for derived files and other files that are not directly in the image file.
+- **obj_id** - Id of file in tsk_objects
+- **path** - Path to where the file is locally stored in a file system
+- **encoding_type** - Method used to store the file on the disk 
+
+## file_encoding_types 
+Methods that can be used to store files on local disks to prevent them from being quarantined by antivirus
+- **encoding_type** - ID of method used to store data.  See org.sleuthkit.datamodel.TskData.EncodingType enum 
+- **name** -  Display name of technique
+
+## tsk_file_attributes
+Stores extended attributes for a particular file that do not have a column in tsk_files. Custom BlackboardAttribute types can be defined. 
+- **id** - Id of the attribute
+- **obj_id** - File this attribute is associated with (references tsk_files)
+- **attribute_type_id** - Id for the type of attribute (can be looked up in the blackboard_attribute_types)
+- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
+- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
+- **value_text** - A string of text (should be NULL unless the value type is string)
+- **value_int32** - An integer (should be NULL unless the value type is int)
+- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
+- **value_double** - A double (should be NULL unless the value type is double)
+
+## tsk_files_derived_method
+Derived files are those that result from analyzing another file.  For example, files that are extracted from a ZIP file will be considered derived.  This table keeps track of the derivation techniques that were used to make the derived files. 
+
+NOTE: This table is not used in any code.
+
+- **derived_id** - Unique id for the derivation method. 
+- **tool_name** - Name of derivation method/tool
+- **tool_version** - Version of tool used in derivation method
+- **other** - Other details
+
+## tsk_files_derived
+Each derived file has a row that captures the information needed to re-derive it
+
+NOTE: This table is not used in any code.
+
+- **obj_id** - Id of file in tsk_objects
+- **derived_id** - Id of derivation method in tsk_files_derived_method
+- **rederive** - Details needed to re-derive file (will be specific to the derivation method)
+
+
+# Blackboard Tables 
+The \ref mod_bbpage "Blackboard" is used to store results and derived data from analysis modules. 
+
+## blackboard_artifacts
+Stores artifacts associated with objects. 
+- **artifact_id** - Id of the artifact (assigned by the database)
+- **obj_id** - Id of the associated object
+- **artifact_obj_id** - Object id of the artifact
+- **artifact_type_id** - Id for the type of artifact (references artifact_type_id in the blackboard_artifact_types table)
+- **data_source_obj_id** - Id of the data source for the artifact
+- **review_status_id** - Review status (references review_status_id in review_statuses)
+
+## tsk_analysis_results
+Additional information for artifacts that are analysis results
+- **artifact_obj_id** - Object id of the associated artifact (artifact_obj_id column in blackboard_artifacts)
+- **significance** - Significance to show if the result shows the object is relevant (as org.sleuthkit.datamodel.Score.Significance enum)
+- **method_category** - Category of the analysis method used (as org.sleuthkit.datamodel.Score.MethodCategory enum)
+- **conclusion** - Optional, text description of the conclusion of the analysis method. 
+- **configuration** - Optional, text description of the analysis method configuration (such as what hash set or keyword list was used)
+- **justification** - Optional, text description of justification of the conclusion and significance. 
+- **ignore_score** - True (1) if score should be ignored when calculating aggregate score, false (0) otherwise. This allows users to ignore a false positive.
+
+## tsk_data_artifacts
+Additional information for artifacts that store extracted data. 
+- **artifact_obj_id** - Object id of the associated artifact (artifact_obj_id column in blackboard_artifacts)
+- **os_account_obj_id** - Object id of the associated OS account
+
+## blackboard_artifact_types
+Types of artifacts
+- **artifact_type_id** - Id for the type (this is used by the blackboard_artifacts table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+- **category_type** - Indicates whether this is a data artifact or an analysis result
+
+## blackboard_attributes
+Stores name value pairs associated with an artifact. Only one of the value columns should be populated.
+- **artifact_id** - Id of the associated artifact
+- **artifact_type_id** - Artifact type of the associated artifact
+- **source** - Source string, should be module name that created the entry
+- **context** - Additional context string
+- **attribute_type_id** - Id for the type of attribute (can be looked up in the blackboard_attribute_types)
+- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
+- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
+- **value_text** - A string of text (should be NULL unless the value type is string)
+- **value_int32** - An integer (should be NULL unless the value type is int)
+- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
+- **value_double** - A double (should be NULL unless the value type is double)
+
+## blackboard_attribute_types
+Types of attribute
+- **attribute_type_id** - Id for the type (this is used by the blackboard_attributes table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+- **value_type** - Expected type of data for the attribute type (see blackboard_attributes)
+
+## review_statuses
+Review status of an artifact. Should mirror the org.sleuthkit.datamodel.BlackboardArtifact.ReviewStatus enum.
+- **review_status_id** - Id of the status 
+- **review_status_name** - Internal name of the status
+- **display_name** - Display name (should be human readable)
+
+## tsk_aggregate_score
+Stores the score of an object that is a combination of the various analysis result scores
+- **obj_id** - Id of the object that corresponds to this score
+- **data_source_obj_id** - Id of the data source the object belongs to
+- **significance** - Significance (as org.sleuthkit.datamodel.Score.Significance enum)
+- **method_category** - Category of the method used (as org.sleuthkit.datamodel.Score.MethodCategory enum)
+
+
+
+# Host Addresses
+Host addresses are various forms of identifiers assigned to a computer, such as host names or MAC addresses. These tables store data that is also stored in the data artifacts, but these tables allow for correlation and scoring of specific hosts. 
+
+## tsk_host_addresses
+One entry is created in this table for each host address found in the data source.  Examples include domain names (www.sleuthkit.org), IP addresses, and Bluetooth MAC addresses.
+- **id** - Id of the host address
+- **address_type** - Type of address (as org.sleuthkit.datamodel.HostAddress.HostAddressType enum)
+- **address** - Address (must be unique within the scope of address_type). 
+
+## tsk_host_address_dns_ip_map
+Stores data if host names and IP addresses were resolved between each other. 
+- **id** - Id of the mapping
+- **dns_address_id** - Id of the DNS address in tsk_host_addresses
+- **ip_address_id** - Id of the IP address in tsk_host_addresses
+- **source_obj_id** - Id of the object used to determine this mapping (references tsk_objects)
+- **time** - Timestamp when this mapping was recorded
+
+## tsk_host_address_usage
+Tracks which artifacts and files had a reference to a given host address. This is used to show what other artifacts used the same address. 
+- **id** - Id of the usage
+- **addr_obj_id** - Id of the host address
+- **obj_id** - Id of the object that had a reference/usage to the address (references tsk_objects)
+- **data_source_obj_id** - Id of the data source associated with the usage
+
+
+# Operating System Accounts
+Stores data related to operating system accounts.  Communication-related accounts (such as email or social media) are stored in other tables (see Communication Accounts below).
+
+
+## tsk_os_account_realms
+Every OS Account must belong to a realm, which defines the scope of the account.  Realms can be local to a given computer or domain-based. 
+- **realm_name** - Display name of the realm (realm_name or realm_addr must be set)
+- **realm_addr** - Address/ID of the realm (realm_name or realm_addr must be set)
+- **realm_signature** - Used internally for unique clause.  realm_addr if it is set.  Otherwise, realm_name.
+- **scope_host_id** - Optional host that this realm is scoped to.  By default, realms are scoped to a given host. 
+- **scope_confidence** - Confidence of the scope of the realm (as org.sleuthkit.datamodel.OsAccountRealm.ScopeConfidence enum)
+- **db_status** - Status of this realm in the database (as org.sleuthkit.datamodel.OsAccountRealm.RealmDbStatus enum)
+- **merged_into** - For merged realms, set to the id of the realm they were merged in to.
+
+## tsk_os_accounts
+Stores operating system accounts
+- **os_account_obj_id** - Id of the OS account
+- **realm_id** - Id of the associated realm (references tsk_os_account_realms)
+- **login_name** - Login name (login name or addr must be present)
+- **addr** - Address/ID of account (login name or addr must be present)
+- **signature** - Used internally for unique clause
+- **full_name** - Full name
+- **status** - Status of the account (as org.sleuthkit.datamodel.OsAccount.OsAccountStatus enum)
+- **type** - Type of account (as org.sleuthkit.datamodel.OsAccount.OsAccountType enum)
+- **created_date** - Timestamp of account creation
+- **db_status** - Status of this account in the database (active/merged/deleted)
+- **merged_into** - For merged accounts, set to the id of the account they were merged in to.
+
+## tsk_os_account_attributes
+Stores additional attributes for an OS account. Similar to blackboard_attributes. Attributes can either be specific to a host or domain-scoped. 
+- **id** - Id of the attribute
+- **os_account_obj_id** - Id of the associated OS account
+- **host_id** - Host Id if the attribute is scoped to the host.  NULL if the attribute is domain-scoped.
+- **source_obj_id** - Optional object id of where the attribute data was derived from (such as a registry hive) (references tsk_objects)
+- **attribute_type_id** - Type of attribute (see org.sleuthkit.datamodel.BlackboardAttribute.BlackboardAttribute.Type)
+- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
+- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
+- **value_text** - A string of text (should be NULL unless the value type is string)
+- **value_int32** - An integer (should be NULL unless the value type is int)
+- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
+- **value_double** - A double (should be NULL unless the value type is double)
+
+## tsk_os_account_instances
+Records that an OS account is associated with a specific data source.  For example, the account logged in, accessed data, etc. 
+- **id** - Id of the OS account instance
+- **os_account_obj_id** - Id of the OS account that was referenced
+- **data_source_obj_id** - Id of the data source
+- **instance_type** - Type of instance (as org.sleuthkit.datamodel.OsAccountInstance.OsAccountInstanceType enum)
+
+
+# Communication Accounts
+Stores data related to communications between two parties. It is highly recommended to use 
+the org.sleuthkit.datamodel.CommunicationsManager API to create/access this type of data
+(see the \ref mod_compage page).
+
+## accounts
+Stores communication accounts (email, phone number, etc.).  Note that this does not include OS accounts. 
+- **account_id** - Id for the account within the scope of the database (i.e. Row Id) (used in the account_relationships table)
+- **account_type_id** - The type of account (must match an account_type_id entry from the account_types table)
+- **account_unique_identifier** - The phone number/email/other identifier associated with the account that is unique within the Account Type 
+
+## account_types
+Types of accounts and service providers (Phone, email, Twitter, Facebook, etc.)
+- **account_type_id** - Id for the type (this is used by the accounts table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+
+## account_relationships
+Stores non-directional relationships between two accounts if they communicated or had references to each other (such as contact book)
+- **relationship_id** -  Id for the relationship
+- **account1_id** - Id of the first participant (from account_id column in accounts table)
+- **account2_id** - Id of the second participant (from account_id column in accounts table)
+- **relationship_source_obj_id** - Id of the artifact this relationship was derived from (artifact_id column from the blackboard_artifacts)
+- **date_time** - Time the communication took place, stored in number of seconds since Jan 1, 1970 UTC (NULL if unknown)
+- **relationship_type** - The type of relationship (as org.sleuthkit.datamodel.Relationship.Type)
+- **data_source_obj_id** - Id of the data source this relationship came from (from obj_id in data_source_info)
+
+# Timeline
+Stores data used to populate various timelines. Two tables are used to reduce data duplication. It is highly recommended to use 
+the org.sleuthkit.datamodel.TimelineManager API to create/access this type of data.  
+
+## tsk_event_types
+Stores the types for events. The super_type_id column is used to arrange the types into a tree.
+- **event_type_id** - Id for the type
+- **display_name** - Display name for the type (unique, should be human readable)
+- **super_type_id** - Parent type for the type (used for building hierarchy; references the event_type_id in this table)
+
+## tsk_event_descriptions
+Stores descriptions of an event. This table exists to reduce duplicate data that is common to events. For example, a file will have only one row in tsk_event_descriptions, but could have 4+ rows in tsk_events that all refer to the same description. Note that the combination of the full_description, content_obj_id, and artifact_id columns must be unique.
+- **event_description_id** - Id for the event description
+- **full_description** - Full length description of the event (required).  For example, the full file path including file name. 
+- **med_description** - Medium length description of the event (may be null).  For example, a file may have only the first three folder names.
+- **short_description** - Short length description of the event (may be null).  For example, a file may have only its first folder name. 
+- **data_source_obj_id** -  Object id of the data source for the event source (references obj_id column in data_source_info)
+- **content_obj_id** - If the event is from a non-artifact, then this is the object id from that source.  If the event is from an artifact, then this is the object id of the artifact's source. (references obj_id column in tsk_objects)
+- **artifact_id** - If the event is from a non-artifact, this is null. If the event is from an artifact, then this is the id of the artifact (references artifact_id column in blackboard_artifacts) (may be null)
+- **hash_hit** - 1 if the file associated with the event has a hash set hit, 0 otherwise
+- **tagged** - 1 if the direct source of the event has been tagged, 0 otherwise
+
+## tsk_events
+Stores each event. A file, artifact, or other type of content can have several rows in this table. One for each time stamp. 
+- **event_id** - Id for the event
+- **event_type_id** - Event type id (references event_type_id column in tsk_event_types)
+- **event_description_id** - Event description id (references event_description_id column in tsk_event_descriptions)
+- **time** -  Time the event occurred, in seconds from the UNIX epoch
+
+# Examiners and Reports
+
+## tsk_examiners
+Encapsulates the concept of an examiner associated with a case.
+- **examiner_id** - Id for the examiner
+- **login_name** - Login name for the examiner (must be unique)
+- **display_name** - Display name for the examiner (may be null)
+
+## reports
+Stores information on generated reports.
+- **obj_id** - Id of the report
+- **path** - Full path to the report (including file name)
+- **crtime** - Time the report was created, in seconds from the UNIX epoch
+- **src_module_name** - Name of the module that created the report
+- **report_name** - Name of the report (can be empty string)
+
+# Tags 
+
+## tag_names
+Defines what tag names the user has created and can therefore be applied.
+- **tag_name_id** - Unique ID for each tag name
+- **display_name** - Display name of tag
+- **description**  - Description  (can be empty string)
+- **color** - Color choice for tag (can be empty string)
+- **knownStatus** - Stores whether a tag is notable/bad (as org.sleuthkit.datamodel.TskData.FileKnown enum)
+- **tag_set_id** - Id of the tag set the tag name belongs to (references tag_set_id in tsk_tag_sets, may be null)
+- **rank** - Used to order the tag names for a given tag set for display purposes
+
+## tsk_tag_sets
+Used to group entries from the tag_names table. An object can have only one tag from a tag set at a time. 
+- **tag_set_id** - Id of the tag set
+- **name** - Name of the tag set (unique, should be human readable)
+
+## content_tags
+One row for each file tagged.  
+- **tag_id** - unique ID
+- **obj_id** - object id of Content that has been tagged
+- **tag_name_id** - Tag name that was used
+- **comment**  - optional comment 
+- **begin_byte_offset** - optional byte offset into file that was tagged
+- **end_byte_offset** - optional byte ending offset into file that was tagged
+- **examiner_id** - Examiner that tagged the content (references examiner_id in tsk_examiners)
+
+## blackboard_artifact_tags
+One row for each artifact that is tagged.
+- **tag_id** - unique ID
+- **artifact_id** - Artifact ID of artifact that was tagged
+- **tag_name_id** - Tag name that was used
+- **comment** - Optional comment
+- **examiner_id** - Examiner that tagged the artifact (references examiner_id in tsk_examiners)
+
+
+# Ingest Module Status
+These tables keep track in Autopsy which modules were run on the data sources.
+
+## ingest_module_types
+Defines the types of ingest modules supported. Must exactly match the names and ordering in the org.sleuthkit.datamodel.IngestModuleInfo.IngestModuleType enum.
+- **type_id** - Id for the ingest module type
+- **type_name** - Internal name for the ingest module type
+
+## ingest_modules
+Defines which modules were installed and run on at least one data source.  One row for each module. 
+- **ingest_module_id** - Id of the ingest module
+- **display_name** - Display name for the ingest module (should be human readable)
+- **unique_name** - Unique name for the ingest module
+- **type_id** - Type of ingest module (references type_id from ingest_module_types)
+- **version** - Version of the ingest module
+
+## ingest_job_status_types
+Defines the status options for ingest jobs. Must match the names and ordering in the org.sleuthkit.datamodel.IngestJobInfo.IngestJobStatusType enum.
+- **type_id** - Id for the ingest job status type
+- **type_name** - Internal name for the ingest job status type
+
+##  ingest_jobs
+One row is created each time ingest is started, which is a set of modules in a pipeline. 
+- **ingest_job_id** - Id of the ingest job
+- **obj_id** - Id of the data source ingest is being run on
+- **host_name** - Name of the host that is running the ingest job
+- **start_date_time** - Time the ingest job started (stored in number of milliseconds since Jan 1, 1970 UTC)
+- **end_date_time** - Time the ingest job finished (stored in number of milliseconds since Jan 1, 1970 UTC)
+- **status_id** - Ingest job status (references type_id from ingest_job_status_types)
+- **settings_dir** - Directory of the job's settings (may be an empty string)
+
+##  ingest_job_modules
+Defines the order of the modules in a given pipeline (i.e. ingest_job).
+- **ingest_job_id** - Id for the ingest job (references ingest_job_id in ingest_jobs)
+- **ingest_module_id** - Id of the ingest module (references ingest_module_id in ingest_modules)
+- **pipeline_position** - Order that the ingest module was run
 
-Please see the \ref db_schema_9_1_page page for all changes from schema 8.6 to schema 9.0.
 
 */
diff --git a/bindings/java/doxygen/schema/db_schema_9_1.dox b/bindings/java/doxygen/schema/db_schema_9_1.dox
deleted file mode 100644
index 0c7a48a203e5d1cea49d99636d7d12a2f9562f0e..0000000000000000000000000000000000000000
--- a/bindings/java/doxygen/schema/db_schema_9_1.dox
+++ /dev/null
@@ -1,543 +0,0 @@
-/*! \page db_schema_9_1_page TSK & Autopsy Database Schema (Schema version 9.1)
-
-[TOC]
-
-# Introduction
-
-This page outlines version 9.1 the database that is used by The Sleuth Kit and Autopsy. The goal of this page is to provide short descriptions for each table and column and not focus on foreign key requirements, etc. If you want that level of detail, then refer to the actual schema in addition to this. 
-
-Each Autopsy release is associated with a schema version with a major and minor version number. If a case with an older schema version is opened in a new version of Autopsy, the case will automatically be updated to the current schema. Going the other direction (opening a case that was created with a newer version of Autopsy), two things may happen:
-- If the case database has the same major number as the version of Autopsy being used, the case should generally be able to be opened and used.
-- If the case database has a higher major number than the version of Autopsy being used, an error will be displayed when attempting to open the case. 
-
-You can find a basic graphic of some of the table relationships <a href="https://docs.google.com/drawings/d/1omR_uUAp1fQt720oJ-kk8C48BXmVa3PNjPZCDdT0Tb4/edit?usp#sharing">here</a>
-
-
-Some general notes on this schema:
-- Nearly every type of data is assigned a unique ID, called the Object ID
-- The objects form a hierarchy, that shows where data came from.  A child comes from its parent.  
- - For example, disk images are the root, with a volume system below it, then a file system, and then files and directories. 
-- This schema has been designed to store data beyond the file system data that The Sleuth Kit supports. It can store carved files, a folder full of local files, etc.
-- The Blackboard is used to store artifacts, which contain attributes (name/value pairs).  Artifacts are used to store data types that do not have more formal tables. Module writers can make whatever artifact types they want. See \ref mod_bbpage for more details. 
-- The Sleuth Kit will make virtual files to span the unallocated space.  They will have a naming format of 'Unalloc_[PARENT-OBJECT-ID]_[BYTE-START]_[BYTE-END]'.
-
-# Schema Information
-
-This was a big change. Tables were added to support analysis results, OS accounts, hosts and person structure of data sources, and host addresses (IPs, DNS, etc.). The major component of the version number has been incremented because there are new org.sleuthkit.datamodel.TskData.ObjectType enum types (OsAccount and HostAddress). More information on how to use these new objects can be found on the \ref mod_dspage and \ref mod_os_accounts_page pages.
-
-<ul>
-<li><b>Autopsy versions: </b> Autopsy 4.19
-<li><b>Changes from version 8.6:</b>
-<ul>
-<li> New columns:
-<ul>
-<li>host_id, added_date_time, acquisition_tool_settings, acquisition_tool_name, acquisition_tool_version in data_source_info
-<li>category_type in artifact_types
-<li>owner_uid, os_account_obj_id in tsk_files
-</ul>
-<li> New tables:
-<ul>
-<li>tsk_aggregate_score
-<li>tsk_analysis_results
-<li>tsk_data_artifacts
-<li>tsk_file_attributes
-<li>tsk_hosts
-<li>tsk_host_addresses
-<li>tsk_host_address_dns_ip_map
-<li>tsk_host_address_usage
-<li>tsk_os_accounts
-<li>tsk_os_account_attributes
-<li>tsk_os_account_instances
-<li>tsk_os_account_realms
-<li>tsk_persons
-</ul>
-</ul>
-</ul>
-
-
-# General Information Tables 
-## tsk_db_info 
-Metadata about the database.
-- **schema_ver** - Major version number of the current database schema
-- **tsk_ver** - Version of TSK used to create database
-- **schema_minor_version** - Minor version number of the current database schema
-
-## tsk_db_info_extended
-Name & Value pair table to store any information about the database.  For example, which schema it was created with. etc. 
-- **name** - Any string name
-- **value** - Any string value
-
-
-# Object Tables 
-## tsk_objects 
-Every object (image, volume system, file, etc.) has an entry in this table.  This table allows you to find the parent of a given object and allows objects to be tagged and have children.  This table provides items with a unique object id.  The details of the object are in other tables.  
-- **obj_id** - Unique id 
-- **par_obj_id** - The object id of the parent object (NULL for root objects). The parent of a volume system is an image, the parent of a directory is a directory or filesystem, the parent of a filesystem is a volume or an image, etc.
-- **type** - Object type (as org.sleuthkit.datamodel.TskData.ObjectType enum)
-
-
-# Hosts / Persons
-Stores data related to hosts and persons, which can help organize data sources. 
-
-## tsk_persons
-Stores persons for the case. A peron is someone who owns or used a data source in the case. 
-- **id** - Id of the person
-- **name** - Name of the person (should be human readable)
-
-## tsk_hosts
-Stores hosts that have a data source in the case. Each data source must be associated with a host.  These are NOT created for a reference to an external host (such as a web domain). 
-- **id** - Id of the host
-- **name** - Name of the host (should be human readable)
-- **db_status** - Status of the host (active/merged/deleted as org.sleuthkit.datamodel.Host.HostDbStatus)
-- **person_id** - Optional id of associated person
-- **merged_into** - Stores the host ID that this host was merged into
-
-# Data Source / Device Tables 
-## data_source_info
-Contains information about a data source, which could be an image.  This is where we group data sources into devices (based on device ID).
-- **obj_id** - Id of image/data source in tsk_objects
-- **device_id** - Unique ID (GUID) for the device that contains the data source
-- **time_zone** - Timezone that the data source was originally located in
-- **acquisition_details** - Notes on the acquisition of the data source
-- **added_date_time** - Timestamp of when the data source was added
-- **acquisition_tool_name** - Name of the tool used to acquire the image
-- **acquisition_tool_settings** - Specific settings used by the tool to acquire the image
-- **acquisition_tool_version** - Version of the acquisition tool
-- **host_id** - Host associated with this image (must be set)
-
-
-# Disk Image Tables
-
-## tsk_image_info 
-Contains information about each set of images that is stored in the database. 
-- **obj_id** - Id of image in tsk_objects
-- **type** - Type of disk image format (as org.sleuthkit.datamodel.TskData.TSK_IMG_TYPE_ENUM)
-- **ssize** - Sector size of device in bytes
-- **tzone** - Timezone where image is from (the same format that TSK tools want as input)
-- **size** - Size of the original image (in bytes) 
-- **md5** - MD5 hash of the image (for compressed data such as E01, the hashes are of the decompressed image, not the E01 itself)
-- **sha1** - SHA-1 hash of the image
-- **sha256** - SHA-256 hash of the image
-- **display_name** - Display name of the image
-
-## tsk_image_names
-Stores path(s) to file(s) on disk that make up an image set.
-- **obj_id** - Id of image in tsk_objects
-- **name** - Path to location of image file on disk
-- **sequence** - Position in sequence of image parts
-
-
-# Volume System Tables
-## tsk_vs_info
-Contains one row for every volume system found in the images.
-- **obj_id** - Id of volume system in tsk_objects
-- **vs_type** - Type of volume system / media management (as org.sleuthkit.datamodel.TskData.TSK_VS_TYPE_ENUM)
-- **img_offset** - Byte offset where VS starts in disk image
-- **block_size** - Size of blocks in bytes
-
-## tsk_vs_parts
-Contains one row for every volume / partition in the images. 
-- **obj_id** - Id of volume in tsk_objects
-- **addr** - Address of the partition
-- **start** - Sector offset of start of partition
-- **length** - Number of sectors in partition
-- **desc** - Description of partition (volume system type-specific)
-- **flags** - Flags for partition (as org.sleuthkit.datamodel.TskData.TSK_VS_PART_FLAG_ENUM)
-
-## tsk_pool_info 
-Contains information about pools (for APFS, logical disk management, etc.)
-- **obj_id** - Id of pool in tsk_objects
-- **pool_type** - Type of pool (as org.sleuthkit.datamodel.TskData.TSK_POOL_TYPE_ENUM)
-
-# File System Tables
-## tsk_fs_info
-Contains one for for every file system in the images. 
-- **obj_id** - Id of filesystem in tsk_objects
-- **data_source_obj_id** - Id of the data source for the file system
-- **img_offset** - Byte offset that filesystem starts at
-- **fs_type** - Type of file system (as org.sleuthkit.datamodel.TskData.TSK_FS_TYPE_ENUM)
-- **block_size** - Size of each block (in bytes)
-- **block_count** - Number of blocks in filesystem
-- **root_inum** - Metadata address of root directory
-- **first_inum** - First valid metadata address
-- **last_inum** - Last valid metadata address
-- **display_name** - Display name of file system (could be volume label)
-
-## tsk_files
-Contains one for for every file found in the images.  Has the basic metadata for the file. 
-- **obj_id** - Id of file in tsk_objects
-- **fs_obj_id** - Id of filesystem in tsk_objects (NULL if file is not located in a file system -- carved in unpartitioned space, etc.)
-- **data_source_obj_id** - Id of the data source for the file
-- **attr_type** - Type of attribute (as org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM)
-- **attr_id** - Id of attribute
-- **name** - Name of attribute. Will be NULL if attribute doesn't have a name.  Must not have any slashes in it. 
-- **meta_addr** - Address of the metadata structure that the name points to
-- **meta_seq** - Sequence of the metadata address
-- **type** - Type of file: filesystem, carved, etc. (as org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM enum)
-- **has_layout** - True if file has an entry in tsk_file_layout
-- **has_path** - True if file has an entry in tsk_files_path
-- **dir_type** - File type information: directory, file, etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM)
-- **meta_type** - File type (as org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM)
-- **dir_flags** -  Flags that describe allocation status etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM)
-- **meta_flags** - Flags for the file for its allocation status etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM)
-- **size** - File size in bytes
-- **ctime** - Last file / metadata status change time (stored in number of seconds since Jan 1, 1970 UTC)
-- **crtime** - Created time
-- **atime** - Last file content accessed time
-- **mtime** - Last file content modification time
-- **mode** - Unix-style permissions (as org.sleuthkit.datamodel.TskData.TSK_FS_META_MODE_ENUM)
-- **uid** - Owner id
-- **gid** - Group id
-- **md5** - MD5 hash of file contents
-- **sha256** - SHA-256 hash of file contents
-- **known** - Known status of file (as org.sleuthkit.datamodel.TskData.FileKnown)
-- **parent_path** - Full path of parent folder. Must begin and end with a '/' (Note that a single '/' is valid)
-- **mime_type** - MIME type of the file content, if it has been detected. 
-- **extension** - File extension
-- **owner_uid** - Unique ID of the owner (SID in Windows)
-- **os_account_obj_id** - ID of optional associated OS account
-
-## tsk_file_layout
-Stores the layout of a file within the image.  A file will have one or more rows in this table depending on how fragmented it was. All file types use this table (file system, carved, unallocated blocks, etc.).
-- **obj_id** - Id of file in tsk_objects
-- **sequence** - Position of the run in the file (0-based and the obj_id and sequence pair will be unique in the table)
-- **byte_start** - Byte offset of fragment relative to the start of the image file
-- **byte_len** - Length of fragment in bytes
-
-
-## tsk_files_path
-If a "locally-stored" file has been imported into the database for analysis, then this table stores its path.  Used for derived files and other files that are not directly in the image file.
-- **obj_id** - Id of file in tsk_objects
-- **path** - Path to where the file is locally stored in a file system
-- **encoding_type** - Method used to store the file on the disk 
-
-## file_encoding_types 
-Methods that can be used to store files on local disks to prevent them from being quarantined by antivirus
-- **encoding_type** - ID of method used to store data.  See org.sleuthkit.datamodel.TskData.EncodingType enum 
-- **name** -  Display name of technique
-
-## tsk_file_attributes
-Stores extended attributes for a particular file that do not have a column in tsk_files. Custom BlackboardAttribute types can be defined. 
-- **id** - Id of the attribute
-- **obj_id** - File this attribute is associated with (references tsk_files)
-- **attribute_type_id** - Id for the type of attribute (can be looked up in the blackboard_attribute_types)
-- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
-- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
-- **value_text** - A string of text (should be NULL unless the value type is string)
-- **value_int32** - An integer (should be NULL unless the value type is int)
-- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
-- **value_double** - A double (should be NULL unless the value type is double)
-
-## tsk_files_derived_method
-Derived files are those that result from analyzing another file.  For example, files that are extracted from a ZIP file will be considered derived.  This table keeps track of the derivation techniques that were used to make the derived files. 
-
-NOTE: This table is not used in any code.
-
-- **derived_id** - Unique id for the derivation method. 
-- **tool_name** - Name of derivation method/tool
-- **tool_version** - Version of tool used in derivation method
-- **other** - Other details
-
-## tsk_files_derived
-Each derived file has a row that captures the information needed to re-derive it
-
-NOTE: This table is not used in any code.
-
-- **obj_id** - Id of file in tsk_objects
-- **derived_id** - Id of derivation method in tsk_files_derived_method
-- **rederive** - Details needed to re-derive file (will be specific to the derivation method)
-
-
-# Blackboard Tables 
-The \ref mod_bbpage "Blackboard" is used to store results and derived data from analysis modules. 
-
-## blackboard_artifacts
-Stores artifacts associated with objects. 
-- **artifact_id** - Id of the artifact (assigned by the database)
-- **obj_id** - Id of the associated object
-- **artifact_obj_id** - Object id of the artifact
-- **artifact_type_id** - Id for the type of artifact (can be looked up in the blackboard_artifact_types table)
-- **data_source_obj_id** - Id of the data source for the artifact
-- **artifact_type_id** - Type of artifact (references artifact_type_id in blackboard_artifact_types)
-- **review_status_id** - Review status (references review_status_id in review_statuses)
-
-## tsk_analysis_results
-Additional information for artifacts that are analysis results
-- **artifact_obj_id** - Object id of the associated artifact (artifact_obj_id column in blackboard_artifacts)
-- **significance** - Significance to show if the result shows the object is relevant (as org.sleuthkit.datamodel.Score.Significance enum)
-- **method_category** - Category of the analysis method used (as org.sleuthkit.datamodel.Score.MethodCategory enum)
-- **conclusion** - Optional, text description of the conclusion of the analysis method. 
-- **configuration** - Otional, text description of the analysis method configuration (such as what hash set or keyword list was used)
-- **justification** - Optional, text description of justification of the conclusion and significance. 
-- **ignore_score** - True (1) if score should be ignored when calculating aggregate score, false (0) otherwise. This allows users to ignore a false positive.
-
-## tsk_data_artifacts
-Additional information for artifacts that store extracted data. 
-- **artifact_obj_id** - Object id of the associated artifact (artifact_obj_id column in blackboard_artifacts)
-- **os_account_obj_id** - Object id of the associated OS account
-
-## blackboard_artifact_types
-Types of artifacts
-- **artifact_type_id** - Id for the type (this is used by the blackboard_artifacts table)
-- **type_name** - A string identifier for the type (unique)
-- **display_name** - A display name for the type (not unique, should be human readable)
-- **category_type** - Indicates whether this is a data artifact or an analysis result
-
-## blackboard_attributes
-Stores name value pairs associated with an artifact. Only one of the value columns should be populated.
-- **artifact_id** - Id of the associated artifact
-- **artifact_type_id** - Artifact type of the associated artifact
-- **source** - Source string, should be module name that created the entry
-- **context** - Additional context string
-- **attribute_type_id** - Id for the type of attribute (can be looked up in the blackboard_attribute_types)
-- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
-- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
-- **value_text** - A string of text (should be NULL unless the value type is string)
-- **value_int32** - An integer (should be NULL unless the value type is int)
-- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
-- **value_double** - A double (should be NULL unless the value type is double)
-
-## blackboard_attribute_types
-Types of attribute
-- **attribute_type_id** - Id for the type (this is used by the blackboard_attributes table)
-- **type_name** - A string identifier for the type (unique)
-- **display_name** - A display name for the type (not unique, should be human readable)
-- **value_type** - Expected type of data for the attribute type (see blackboard_attributes)
-
-## review_statuses
-Review status of an artifact. Should mirror the org.sleuthkit.datamodel.BlackboardArtifact.ReviewStatus enum.
-- **review_status_id** - Id of the status 
-- **review_status_name** - Internal name of the status
-- **display_name** - Display name (should be human readable)
-
-## tsk_aggregate_score
-Stores the score of an object that is a combination of the various analysis result scores
-- **obj_id** - Id of the object that corresponds to this score
-- **data_source_obj_id** - Id of the data source the object belongs to
-- **significance** - Significance (as org.sleuthkit.datamodel.Score.Significance enum)
-- **method_category** - Category of the method used (as org.sleuthkit.datamodel.Score.MethodCategory enum)
-
-
-
-# Host Addresses
-Host addresses are various forms of identifiers assigned to a computer, such as host names or MAC addresses. These tables store data that is also stored in the data artifacts, but these tables allow for correlation and scoring of specific hosts. 
-
-## tsk_host_addresses
-One entry is created in this table for each host address found in the data source.  Examples include domain names (www.sleuthkit.org), IP addresses, and BlueTooth MAC addresses.
-- **id** - Id of the host address
-- **address_type** - Type of address (as org.sleuthkit.datamodel.HostAddress.HostAddressType enum)
-- **address** - Address (must be unique within the scope of address_type). 
-
-## tsk_host_address_dns_ip_map
-Stores data if host names and IP addresses were resolved between each other. 
-- **id** - Id of the mapping
-- **dns_address_id** - Id of the DNS address in tsk_host_addresses
-- **ip_address_id** - Id of the IP address in tsk_host_addresses
-- **source_obj_id** - Id of the object used to determine this mapping (references tsk_objects)
-- **time** - Timestamp when this mapping was recorded
-
-## tsk_host_address_usage
-Tracks which artifacts and files had a reference to a given host address. This is used to show what other artifacts used the same address. 
-- **id** - Id of the usage
-- **addr_obj_id** - Id of the host address
-- **obj_id** - Id of the object that had a reference/usage to the address (references tsk_objects)
-- **data_source_obj_id** - Id of the data source associated with the usage
-
-
-# Operating System Accounts
-Stores data related to operating system accounts.  Communication-related accounts (such as email or social media) are stored in other tables (see Communication Acccounts below).
-
-
-## tsk_os_account_realms
-Every OS Account must belong to a realm, which defines the scope of the account.  Realms can be local to a given computer or domain-based. 
-- **realm_name** - Display bame of the realm (realm_name or realm_addr must be set)
-- **realm_addr** - Address/ID of the realm (realm_name or realm_addr must be set)
-- **realm_signature** - Used internally for unique clause.  realm_addr if it is set.  Otherwise, realm_name.
-- **scope_host_id** - Optional host that this realm is scoped to.  By default, realms are scoped to a given host. 
-- **scope_confidence** - Confidence of the scope of the realm (as org.sleuthkit.datamodel.OsAccountRealm.ScopeConfidence enum)
-- **db_status** - Status of this realm in the database (as org.sleuthkit.datamodel.OsAccountRealm.RealmDbStatus enum)
-- **merged_into** - For merged realms, set to the id of the realm they were merged in to.
-
-## tsk_os_accounts
-Stores operating system accounts
-- **os_account_obj_id** - Id of the OS account
-- **realm_id** - Id of the associated realm (references tsk_os_account_realms)
-- **login_name** - Login name (login name or addr must be present)
-- **addr** - Address/ID of account (login name or addr must be present)
-- **signature** - Used internally for unique clause
-- **full_name** - Full name
-- **status** - Status of the account (as org.sleuthkit.datamodel.OsAccount.OsAccountStatus enum)
-- **type** - Type of account (as org.sleuthkit.datamodel.OsAccount.OsAccountType enum)
-- **created_date** - Timestamp of account creation
-- **db_status** - Status of this account in the database (active/merged/deleted)
-- **merged_into** - For merged accounts, set to the id of the account they were merged in to.
-
-## tsk_os_account_attributes
-Stores additional attributes for an OS account. Similar to blackboard_attributes. Attributes can either be specific to a host or domain-scoped. 
-- **id** - Id of the attribute
-- **os_account_obj_id** - Id of the associated OS account
-- **host_id** - Host Id if the attribute is scoped to the host.  NULL if the attribute is domain-scoped.
-- **source_obj_id** - Optional object id of where the attribute data was derived from (such as a registry hive) (references tsk_objects)
-- **attribute_type_id** - Type of attribute (see org.sleuthkit.datamodel.BlackboardAttribute.BlackboardAttribute.Type)
-- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
-- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
-- **value_text** - A string of text (should be NULL unless the value type is string)
-- **value_int32** - An integer (should be NULL unless the value type is int)
-- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
-- **value_double** - A double (should be NULL unless the value type is double)
-
-## tsk_os_account_instances
-Records that an OS account is associated with a specific data source.  For example, the account logged in, accessed data, etc. 
-- **id** - Id of the OS account instance
-- **os_account_obj_id** - Id of the OS account that was referenced
-- **data_source_obj_id** - Id of the data source
-- **instance_type** - Type of instance (as org.sleuthkit.datamodel.OsAccountInstance.OsAccountInstanceType enum)
-
-
-# Communication Accounts
-Stores data related to communications between two parties. It is highly recommended to use 
-the org.sleuthkit.datamodel.CommunicationsManager API to create/access this type of data
-(see the \ref mod_compage page).
-
-## accounts
-Stores communication accounts (email, phone number, etc.).  Note that this does not include OS accounts. 
-- **account_id** - Id for the account within the scope of the database (i.e. Row Id) (used in the account_relationships table)
-- **account_type_id** - The type of account (must match an account_type_id entry from the account_types table)
-- **account_unique_identifier** - The phone number/email/other identifier associated with the account that is unique within the Account Type 
-
-## account_types
-Types of accounts and service providers (Phone, email, Twitter, Facebook, etc.)
-- **account_type_id** - Id for the type (this is used by the accounts table)
-- **type_name** - A string identifier for the type (unique)
-- **display_name** - A display name for the type (not unique, should be human readable)
-
-## account_relationships
-Stores non-directional relationships between two accounts if they communicated or had references to each other (such as contact book)
-- **relationship_id** -  Id for the relationship
-- **account1_id** - Id of the first participant (from account_id column in accounts table)
-- **account2_id** - Id of the second participant (from account_id column in accounts table)
-- **relationship_source_obj_id** - Id of the artifact this relationship was derived from (artifact_id column from the blackboard_artifacts)
-- **date_time** - Time the communication took place, stored in number of seconds since Jan 1, 1970 UTC (NULL if unknown)
-- **relationship_type** - The type of relationship (as org.sleuthkit.datamodel.Relationship.Type)
-- **data_source_obj_id** - Id of the data source this relationship came from (from obj_id in data_source_info)
-
-# Timeline
-Stores data used to populate various timelines. Two tables are used to reduce data duplication. It is highly recommended to use 
-the org.sleuthkit.datamodel.TimelineManager API to create/access this type of data.  
-
-## tsk_event_types
-Stores the types for events. The super_type_id column is used to arrange the types into a tree.
-- **event_type_id** - Id for the type
-- **display_name** - Display name for the type (unique, should be human readable)
-- **super_type_id** - Parent type for the type (used for building heirarchy; references the event_type_id in this table)
-
-## tsk_event_descriptions
-Stores descriptions of an event. This table exists to reduce duplicate data that is common to events. For example, a file will have only one row in tsk_event_descriptions, but could have 4+ rows in tsk_events that all refer to the same description. Note that the combination of the full_description, content_obj_id, and artifact_id columns must be unique.
-- **event_description_id** - Id for the event description
-- **full_description** - Full length description of the event (required).  For example, the full file path including file name. 
-- **med_description** - Medium length description of the event (may be null).  For example, a file may have only the first three folder names.
-- **short_description** - Short length description of the event (may be null).  For example, a file may have only its first folder name. 
-- **data_source_obj_id** -  Object id of the data source for the event source (references obj_id column in data_source_info)
-- **content_obj_id** - If the event is from a non-artifact, then this is the object id from that source.  If the event is from an artifact, then this is the object id of the artifact's source. (references obj_id column in tsk_objects)
-- **artifact_id** - If the event is from a non-artifact, this is null. If the event is from an artifact, then this is the id of the artifact (references artifact_id column in blackboard_artifacts) (may be null)
-- **hash_hit** - 1 if the file associated with the event has a hash set hit, 0 otherwise
-- **tagged** - 1 if the direct source of the event has been tagged, 0 otherwise
-
-## tsk_events
-Stores each event. A file, artifact, or other type of content can have several rows in this table. One for each time stamp. 
-- **event_id** - Id for the event
-- **event_type_id** - Event type id (references event_type_id column in tsk_event_types)
-- **event_description_id** - Event description id (references event_description_id column in tsk_event_descriptions)
-- **time** -  Time the event occurred, in seconds from the UNIX epoch
-
-# Examiners and Reports
-
-## tsk_examiners
-Encapsulates the concept of an examiner associated with a case.
-- **examiner_id** - Id for the examiner
-- **login_name** - Login name for the examiner (must be unique)
-- **display_name** - Display name for the examiner (may be null)
-
-## reports
-Stores information on generated reports.
-- **obj_id** - Id of the report
-- **path** - Full path to the report (including file name)
-- **crtime** - Time the report was created, in seconds from the UNIX epoch
-- **src_module_name** - Name of the module that created the report
-- **report_name** - Name of the report (can be empty string)
-
-# Tags 
-
-## tag_names
-Defines what tag names the user has created and can therefore be applied.
-- **tag_name_id** - Unique ID for each tag name
-- **display_name** - Display name of tag
-- **description**  - Description  (can be empty string)
-- **color** - Color choice for tag (can be empty string)
-- **knownStatus** - Stores whether a tag is notable/bad (as org.sleuthkit.datamodel.TskData.FileKnown enum)
-- **tag_set_id** - Id of the tag set the tag name belongs to (references tag_set_id in tsk_tag_sets, may be null)
-- **rank** - Used to order the tag names for a given tag set for display purposes
-
-## tsk_tag_sets
-Used to group entries from the tag_names table. An object can have only one tag from a tag set at a time. 
-- **tag_set_id** - Id of the tag set
-- **name** - Name of the tag set (unique, should be human readable)
-
-## content_tags
-One row for each file tagged.  
-- **tag_id** - unique ID
-- **obj_id** - object id of Content that has been tagged
-- **tag_name_id** - Tag name that was used
-- **comment**  - optional comment 
-- **begin_byte_offset** - optional byte offset into file that was tagged
-- **end_byte_offset** - optional byte ending offset into file that was tagged
-- **examiner_id** - Examiner that tagged the artifact (references examiner_id in tsk_examiners)
-
-## blackboard_artifact_tags
-One row for each artifact that is tagged.
-- **tag_id** - unique ID
-- **artifact_id** - Artifact ID of artifact that was tagged
-- **tag_name_id** - Tag name that was used
-- **comment** - Optional comment
-- **examiner_id** - Examiner that tagged the artifact (references examiner_id in tsk_examiners)
-
-
-# Ingest Module Status
-These tables keep track in Autopsy which modules were run on the data sources.
-
-## ingest_module_types
-Defines the types of ingest modules supported. Must exactly match the names and ordering in the org.sleuthkit.datamodel.IngestModuleInfo.IngestModuleType enum.
-- **type_id** - Id for the ingest module type
-- **type_name** - Internal name for the ingest module type
-
-## ingest_modules
-Defines which modules were installed and run on at least one data source.  One row for each module. 
-- **ingest_module_id** - Id of the ingest module
-- **display_name** - Display name for the ingest module (should be human readable)
-- **unique_name** - Unique name for the ingest module
-- **type_id** - Type of ingest module (references type_id from ingest_module_types)
-- **version** - Version of the ingest module
-
-## ingest_job_status_types
-Defines the status options for ingest jobs. Must match the names and ordering in the org.sleuthkit.datamodel.IngestJobInfo.IngestJobStatusType enum.
-- **type_id** - Id for the ingest job status type
-- **type_name** - Internal name for the ingest job status type
-
-##  ingest_jobs
-One row is created each time ingest is started, which is a set of modules in a pipeline. 
-- **ingest_job_id** - Id of the ingest job
-- **obj_id** - Id of the data source ingest is being run on
-- **host_name** - Name of the host that is running the ingest job
-- **start_date_time** - Time the ingest job started (stored in number of milliseconds since Jan 1, 1970 UTC)
-- **end_date_time** - Time the ingest job finished (stored in number of milliseconds since Jan 1, 1970 UTC)
-- **status_id** - Ingest job status (references type_id from ingest_job_status_types)
-- **settings_dir** - Directory of the job's settings (may be an empty string)
-
-##  ingest_job_modules
-Defines the order of the modules in a given pipeline (i.e. ingest_job).
-- **ingest_job_id** - Id for the ingest job (references ingest_job_id in ingest_jobs)
-- **ingest_module_id** - Id of the ingest module (references ingest_module_id in ingest_modules)
-- **pipeline_position** - Order that the ingest module was run
-
-
-*/
diff --git a/bindings/java/doxygen/schema/schema_list.dox b/bindings/java/doxygen/schema/schema_list.dox
index d985ee90e80aea3520b6ac42382136572dc24e1e..088b447872f1ffdf6dbf247e93ae8ef73abc10ad 100644
--- a/bindings/java/doxygen/schema/schema_list.dox
+++ b/bindings/java/doxygen/schema/schema_list.dox
@@ -3,7 +3,7 @@
 This page contians links to the documention for selected versions of the TSK & Autopsy database schema.
 
 - Current Schema
- - \subpage db_schema_9_1_page 
+ - \subpage db_schema_9_0_page 
  
 - Older Schemas
  - \subpage db_schema_8_6_page 
diff --git a/bindings/java/src/org/sleuthkit/datamodel/AbstractContent.java b/bindings/java/src/org/sleuthkit/datamodel/AbstractContent.java
index fba4638721a0b4b4398fc496c840e68287326d81..c37a5463a5e17605367f89457dc3b661d714a4e4 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/AbstractContent.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/AbstractContent.java
@@ -18,7 +18,6 @@
  */
 package org.sleuthkit.datamodel;
 
-import com.google.common.base.Optional;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -345,11 +344,11 @@ public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactTyp
 	}
 
 	@Override
-	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
-		DataArtifact artifact =  db.getBlackboard().newDataArtifact(artifactType, objId, this.getDataSource().getId(), attributesList, osAccountId);
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, OsAccount osAccount) throws TskCoreException {
 
-		if(osAccountId != null) {
-			db.getOsAccountManager().newOsAccountInstance(osAccountId, getDataSource().getId(), OsAccountInstance.OsAccountInstanceType.LAUNCHED);
+		DataArtifact artifact =  db.getBlackboard().newDataArtifact(artifactType, objId, this.getDataSource().getId(), attributesList, osAccount);
+		if(osAccount != null) {
+			db.getOsAccountManager().newOsAccountInstance(osAccount, (DataSource)getDataSource(), OsAccountInstance.OsAccountInstanceType.LAUNCHED);
 		}
 		return artifact;
 	}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java b/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
index c4ffa584ecb35fea3f07991f7327042fc916b451..dbeb39670e7300bf883a981e46c2fad79f0f186a 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
@@ -1412,23 +1412,6 @@ public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreExceptio
 		return getSleuthkitCase().newBlackboardArtifact(artifactTypeID, getId(), dataSourceObjectId);
 	}
 
-	/**
-	 * Create and add a data artifact associated with this abstract file. This
-	 * method creates the data artifact with the os account id associated with
-	 * this abstract file if one exits.
-	 *
-	 * @param artifactType   Type of data artifact to create.
-	 * @param attributesList Additional attributes to attach to this data
-	 *                       artifact.
-	 *
-	 * @return DataArtifact New data artifact.
-	 *
-	 * @throws TskCoreException If a critical error occurred within tsk core.
-	 */
-	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
-		return super.newDataArtifact(artifactType, attributesList, osAccountObjId);
-	}
-
 	/**
 	 * Initializes common fields used by AbstactFile implementations (objects in
 	 * tsk_files table)
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java b/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java
index 647ea455955609c25f5081b674a3ebc7ac757033..288478700b17951d2a42b52db89d2bfec5040327 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java
@@ -962,24 +962,29 @@ public static final class BlackboardException extends Exception {
 	 *                        belongs to, may be the same as the sourceObjId.
 	 *                        May be null.
 	 * @param attributes      The attributes. May be empty or null.
-	 * @param osAccountId     The OS account id associated with the artifact.
-	 *                        May be null.
+	 * @param osAccount       The OS account associated with the artifact. May
+	 *                        be null.
 	 *
 	 * @return DataArtifact A new data artifact.
 	 *
 	 * @throws TskCoreException If a critical error occurs within tsk core.
 	 */
 	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId,
-			Collection<BlackboardAttribute> attributes, Long osAccountId) throws TskCoreException {
+			Collection<BlackboardAttribute> attributes, OsAccount osAccount) throws TskCoreException {
 
 		if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) {
 			throw new TskCoreException(String.format("Artifact type (name = %s) is not of Data Artifact category. ", artifactType.getTypeName()));
 		}
 
+		Long osAccountObjdId = null;
+		if (osAccount != null) {
+			osAccountObjdId = osAccount.getId();
+		}
+
 		CaseDbTransaction transaction = caseDb.beginTransaction();
 		try {
 			DataArtifact dataArtifact = newDataArtifact(artifactType, sourceObjId, dataSourceObjId,
-					attributes, osAccountId, transaction);
+					attributes, osAccountObjdId, transaction);
 			transaction.commit();
 			return dataArtifact;
 		} catch (TskCoreException ex) {
diff --git a/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java b/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java
index a43ce6d2952309dde05b300a160641785b8ec8ab..c71423cb4f0ab49f01e70199f0f2f5f67a2d30fa 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java
@@ -699,7 +699,8 @@ public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactTyp
 	}
 
 	@Override
-	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, OsAccount osAccount) throws TskCoreException {
+
 		throw new TskCoreException("Cannot create data artifact of an artifact. Not supported.");
 	}
 
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Content.java b/bindings/java/src/org/sleuthkit/datamodel/Content.java
index ca10cd5711c663d41cdb162ece05525b5ca25b78..2d27f2a7698a0aabab3c21f20ecb5915767bea52 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Content.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Content.java
@@ -1,15 +1,15 @@
 /*
  * Sleuth Kit Data Model
- *
+ * 
  * Copyright 2011-2016 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
- *
+ * 
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- *
+ * 
  *     http://www.apache.org/licenses/LICENSE-2.0
- *
+ * 
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -179,7 +179,7 @@ public interface Content extends SleuthkitVisitableItem {
 	 * Create and add an analysis result associated with this content.
 	 *
 	 *
-	 * @param artifactType	  Type of analysis result artifact to create.
+	 * @param artifactType	 Type of analysis result artifact to create.
 	 * @param score          Score associated with this analysis.
 	 * @param conclusion     Conclusion from the analysis, may be empty.
 	 * @param configuration  Configuration element associated with this
@@ -188,8 +188,8 @@ public interface Content extends SleuthkitVisitableItem {
 	 * @param attributesList Additional attributes to attach to this analysis
 	 *                       result artifact.
 	 *
-	 * @return AnalysisResultAdded The analysis return added and the current
-	 *         aggregate score of content.
+	 * @return AnalysisResultAdded The analysis result added and the
+	 *         current aggregate score of content.
 	 *
 	 * @throws TskCoreException if critical error occurred within tsk core.
 	 */
@@ -201,24 +201,24 @@ public interface Content extends SleuthkitVisitableItem {
 	 * @param artifactType   Type of analysis result artifact to create.
 	 * @param attributesList Additional attributes to attach to this data
 	 *                       artifact.
-	 * @param osAccountId    The OS account id associated with the artifact. May
-	 *                       be null.
+	 * @param osAccount      The OS account associated with the artifact. May be
+	 *                       null.
 	 *
 	 * @return DataArtifact New data artifact.
 	 *
 	 * @throws TskCoreException If a critical error occurred within tsk core.
 	 */
-	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException;
-
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, OsAccount osAccount) throws TskCoreException;
+	
 	/**
 	 * Returns the final score for the content object.
-	 *
+	 * 
 	 * @return Score.
-	 *
+	 * 
 	 * @throws TskCoreException if critical error occurred within tsk core.
 	 */
 	public Score getAggregateScore() throws TskCoreException;
-
+	
 	/**
 	 * Get all artifacts associated with this content that have the given type
 	 * name
@@ -232,17 +232,16 @@ public interface Content extends SleuthkitVisitableItem {
 	public ArrayList<BlackboardArtifact> getArtifacts(String artifactTypeName) throws TskCoreException;
 
 	/**
-	 * Get all analysis results associated with this content, that have the
-	 * given type.
+	 * Get all analysis results associated with this content, that have the given type.
 	 *
-	 * @param artifactType Type to look up.
+	 * @param artifactType  Type to look up.
 	 *
 	 * @return A list of analysis result artifacts matching the type.
 	 *
 	 * @throws TskCoreException If critical error occurred within tsk core.
 	 */
 	public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException;
-
+	
 	/**
 	 * Return the TSK_GEN_INFO artifact for the file so that individual
 	 * attributes can be added to it. Creates one if it does not already exist.
@@ -318,7 +317,7 @@ public interface Content extends SleuthkitVisitableItem {
 	 * @throws TskCoreException If critical error occurred within tsk core.
 	 */
 	public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException;
-
+	
 	/**
 	 * Get the names of all the hashsets that this content is in.
 	 *
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Host.java b/bindings/java/src/org/sleuthkit/datamodel/Host.java
index 35f4eaa58fd0b79257fc68bb7752a6dab77b727b..37292e4960faace8f8b79d90cddbe305d1b44b3a 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Host.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Host.java
@@ -101,7 +101,7 @@ public boolean equals(Object obj) {
 	/**
 	 * Encapsulates status of host row.
 	 */
-	enum HostDbStatus {
+	public enum HostDbStatus {
 		ACTIVE(0, "Active"),
 		MERGED(1, "Merged"),
 		DELETED(2, "Deleted");
@@ -114,7 +114,7 @@ enum HostDbStatus {
 			this.name = name;
 		}
 
-		int getId() {
+		public int getId() {
 			return id;
 		}
 
@@ -122,7 +122,7 @@ String getName() {
 			return name;
 		}
 
-		static HostDbStatus fromID(int typeId) {
+		public static HostDbStatus fromID(int typeId) {
 			for (HostDbStatus type : HostDbStatus.values()) {
 				if (type.ordinal() == typeId) {
 					return type;
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccount.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccount.java
index 70dfa7df4bf1288aecd18ca3a1a183d23d9da90e..b9fd37885012b07b9f5f969e3d78fd8dba87423a 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/OsAccount.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccount.java
@@ -58,6 +58,7 @@ public final class OsAccount extends AbstractContent {
 	private final Long creationTime;
 
 	private List<OsAccountAttribute> osAccountAttributes = null;
+	private List<OsAccountInstance> osAccountInstances = null;
 
 	/**
 	 * Encapsulates status of an account - whether is it active or disabled or
@@ -117,7 +118,7 @@ public static OsAccountStatus fromID(int statusId) {
 	 * are generally invisible - they will not be returned by any queries on the
 	 * string fields.
 	 */
-	enum OsAccountDbStatus {
+	public enum OsAccountDbStatus {
 		ACTIVE(0, "Active"),
 		MERGED(1, "Merged"),
 		DELETED(2, "Deleted");
@@ -130,7 +131,7 @@ enum OsAccountDbStatus {
 			this.name = name;
 		}
 
-		int getId() {
+		public int getId() {
 			return id;
 		}
 
@@ -138,7 +139,7 @@ String getName() {
 			return name;
 		}
 
-		static OsAccountDbStatus fromID(int typeId) {
+		public static OsAccountDbStatus fromID(int typeId) {
 			for (OsAccountDbStatus type : OsAccountDbStatus.values()) {
 				if (type.ordinal() == typeId) {
 					return type;
@@ -247,6 +248,16 @@ synchronized void setAttributesInternal(List<OsAccountAttribute> osAccountAttrib
 		this.osAccountAttributes = osAccountAttributes;
 	}
 
+	/**
+	 * This function is used by OsAccountManager to update the list of OsAccount
+	 * instances.
+	 *
+	 * @param osAccountInstances The osAccount instances that are to be added.
+	 */
+	synchronized void setInstancesInternal(List<OsAccountInstance> osAccountInstances) {
+		this.osAccountInstances = osAccountInstances;
+	}
+
 	/**
 	 * Get the account Object Id that is unique within the scope of the case.
 	 *
@@ -362,7 +373,11 @@ public synchronized List<OsAccountAttribute> getExtendedOsAccountAttributes() th
 	 * @throws TskCoreException
 	 */
 	public synchronized List<OsAccountInstance> getOsAccountInstances() throws TskCoreException {
-		return sleuthkitCase.getOsAccountManager().getOsAccountInstances(this);
+		if (osAccountInstances == null) {
+			osAccountInstances = sleuthkitCase.getOsAccountManager().getOsAccountInstances(this);
+		}
+
+		return Collections.unmodifiableList(osAccountInstances);
 	}
 
 	/**
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccountInstance.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccountInstance.java
index ab51a5dbc4b77b3f8168950ce82263120201ff31..aa9aa302c84ea27ba57981b4700a4b77fd65013f 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/OsAccountInstance.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccountInstance.java
@@ -27,28 +27,39 @@
  */
 public class OsAccountInstance implements Comparable<OsAccountInstance> {
 
-	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
+	private DataSource dataSource = null;
+	private final OsAccount account;
+	private final OsAccountInstanceType instanceType;
 
-	private final SleuthkitCase skCase;
-	private final long accountId;
 	private final long dataSourceId;
-	private final OsAccountInstanceType instanceType;
 
-	private OsAccount account;
-	private DataSource dataSource = null;
+	private SleuthkitCase skCase;
+
+	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
 
 	/**
 	 * Construct with OsAccount and DataSource instances.
 	 *
-	 * @param skCase       The case instance.
 	 * @param account      The instance account.
 	 * @param dataSource   The instance data source
 	 * @param instanceType The instance type.
 	 */
-	OsAccountInstance(SleuthkitCase skCase, OsAccount account, DataSource dataSource, OsAccountInstanceType instanceType) {
-		this(skCase, account.getId(), dataSource.getId(), instanceType);
+	OsAccountInstance(OsAccount account, DataSource dataSource, OsAccountInstanceType instanceType) {
+		this(account, dataSource.getId(), instanceType);
 		this.dataSource = dataSource;
+	}
+
+	/**
+	 * Construct with an OsAccount and a data source object id.
+	 *
+	 * @param account         The instance account.
+	 * @param dataSourceObjId The instance data source object id.
+	 * @param instanceType    The instance type.
+	 */
+	OsAccountInstance(OsAccount account, long dataSourceObjId, OsAccountInstanceType instanceType) {
 		this.account = account;
+		this.dataSourceId = dataSourceObjId;
+		this.instanceType = instanceType;
 	}
 
 	/**
@@ -61,23 +72,10 @@ public class OsAccountInstance implements Comparable<OsAccountInstance> {
 	 * @param instanceType The instance type.
 	 */
 	OsAccountInstance(SleuthkitCase skCase, OsAccount account, long dataSourceId, OsAccountInstanceType instanceType) {
-		this(skCase, account.getId(), dataSourceId, instanceType);
 		this.account = account;
-	}
-
-	/**
-	 * Construct with OsAccount and DataSource instances.
-	 *
-	 * @param skCase          The case instance
-	 * @param accountId       The id of the instance account.
-	 * @param dataSourceObjId The instance data source object id.
-	 * @param instanceType    The instance type.
-	 */
-	OsAccountInstance(SleuthkitCase skCase, long accountId, long dataSourceObjId, OsAccountInstanceType instanceType) {
-		this.skCase = skCase;
-		this.accountId = accountId;
-		this.dataSourceId = dataSourceObjId;
+		this.dataSourceId = dataSourceId;
 		this.instanceType = instanceType;
+		this.skCase = skCase;
 	}
 
 	/**
@@ -85,15 +83,7 @@ public class OsAccountInstance implements Comparable<OsAccountInstance> {
 	 *
 	 * @return The OsAccount object.
 	 */
-	public OsAccount getOsAccount() throws TskCoreException {
-		if (account == null) {
-			try {
-				account = skCase.getOsAccountManager().getOsAccountByObjectId(accountId);
-			} catch (TskCoreException ex) {
-				throw new TskCoreException(String.format("Failed to get OsAccount for id %d", accountId), ex);
-			}
-		}
-
+	public OsAccount getOsAccount() {
 		return account;
 	}
 
@@ -144,7 +134,7 @@ public int compareTo(OsAccountInstance other) {
 			return Long.compare(dataSourceId, other.getDataSourceId());
 		}
 
-		return Long.compare(accountId, other.accountId);
+		return Long.compare(account.getId(), other.getOsAccount().getId());
 	}
 
 	@Override
@@ -159,7 +149,7 @@ public boolean equals(Object obj) {
 			return false;
 		}
 		final OsAccountInstance other = (OsAccountInstance) obj;
-		if (this.accountId != other.accountId) {
+		if (this.account.getId() != other.getOsAccount().getId()) {
 			return false;
 		}
 
@@ -170,7 +160,7 @@ public boolean equals(Object obj) {
 	public int hashCode() {
 		int hash = 7;
 		hash = 67 * hash + Objects.hashCode(this.dataSourceId);
-		hash = 67 * hash + Objects.hashCode(this.accountId);
+		hash = 67 * hash + Objects.hashCode(this.account.getId());
 		hash = 67 * hash + Objects.hashCode(this.instanceType);
 		return hash;
 	}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccountManager.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccountManager.java
index 99f9a8f9fd1cd5ea0d3e0cb43489700ed6352bf4..24d2ffcb41debf3a9389f042453b4460f5a26c69 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/OsAccountManager.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccountManager.java
@@ -523,7 +523,7 @@ public void newOsAccountInstance(OsAccount osAccount, DataSource dataSource, OsA
 		}
 
 		// check cache first
-		OsAccountInstance accountInstance = new OsAccountInstance(db, osAccount, dataSource, instanceType);
+		OsAccountInstance accountInstance = new OsAccountInstance(osAccount, dataSource, instanceType);
 		if (osAccountInstanceCache.contains(accountInstance)) {
 			return;
 		}
@@ -575,43 +575,8 @@ void newOsAccountInstance(OsAccount osAccount, long dataSourceObjId, OsAccountIn
 			throw new TskCoreException("Cannot create account instance with null account.");
 		}
 
-		newOsAccountInstance(osAccount.getId(), dataSourceObjId, instanceType, connection);
-	}
-
-	/**
-	 * Adds a row to the tsk_os_account_instances table. Does nothing if the
-	 * instance already exists in the table.
-	 *
-	 * @param osAccountId     Account id for which an instance needs to be
-	 *                        added.
-	 * @param dataSourceObjId Data source id where the instance is found.
-	 * @param instanceType    Instance type.
-	 *
-	 * @throws TskCoreException If there is an error creating the account
-	 *                          instance.
-	 */
-	void newOsAccountInstance(long osAccountId, long dataSourceObjId, OsAccountInstance.OsAccountInstanceType instanceType) throws TskCoreException {
-		try (CaseDbConnection connection = this.db.getConnection()) {
-			newOsAccountInstance(osAccountId, dataSourceObjId, instanceType, connection);
-		}
-	}
-
-	/**
-	 * Adds a row to the tsk_os_account_instances table. Does nothing if the
-	 * instance already exists in the table.
-	 *
-	 * @param osAccountId     Account id for which an instance needs to be
-	 *                        added.
-	 * @param dataSourceObjId Data source id where the instance is found.
-	 * @param instanceType    Instance type.
-	 * @param connection      The current database connection.
-	 *
-	 * @throws TskCoreException If there is an error creating the account
-	 *                          instance.
-	 */
-	void newOsAccountInstance(long osAccountId, long dataSourceObjId, OsAccountInstance.OsAccountInstanceType instanceType, CaseDbConnection connection) throws TskCoreException {
 		// check cache first
-		OsAccountInstance accountInstance = new OsAccountInstance(db, osAccountId, dataSourceObjId, instanceType);
+		OsAccountInstance accountInstance = new OsAccountInstance(osAccount, dataSourceObjId, instanceType);
 		if (osAccountInstanceCache.contains(accountInstance)) {
 			return;
 		}
@@ -625,7 +590,7 @@ void newOsAccountInstance(long osAccountId, long dataSourceObjId, OsAccountInsta
 			PreparedStatement preparedStatement = connection.getPreparedStatement(accountInsertSQL, Statement.RETURN_GENERATED_KEYS);
 			preparedStatement.clearParameters();
 
-			preparedStatement.setLong(1, osAccountId);
+			preparedStatement.setLong(1, osAccount.getId());
 			preparedStatement.setLong(2, dataSourceObjId);
 			preparedStatement.setInt(3, instanceType.getId());
 
@@ -633,8 +598,14 @@ void newOsAccountInstance(long osAccountId, long dataSourceObjId, OsAccountInsta
 
 			// add to the cache.
 			osAccountInstanceCache.add(accountInstance);
+
+			// update account instances
+			List<OsAccountInstance> currentInstancesList = getOsAccountInstances(osAccount, connection);
+			currentInstancesList.add(accountInstance);
+			osAccount.setInstancesInternal(currentInstancesList);
+
 		} catch (SQLException ex) {
-			throw new TskCoreException(String.format("Error adding os account instance id = %d, data source object id = %d", osAccountId, dataSourceObjId), ex);
+			throw new TskCoreException(String.format("Error adding os account instance for account = %s, data source object id = %d", osAccount.getAddr().orElse(osAccount.getLoginName().orElse("UNKNOWN")), dataSourceObjId), ex);
 		} finally {
 			db.releaseSingleUserCaseWriteLock();
 		}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealm.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealm.java
index 79995728adb0cba555a38fe9f0382706839f745f..34dd8edc6d58e6ba729ec5e7e3c7c7f186732ea8 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealm.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealm.java
@@ -140,7 +140,7 @@ public ScopeConfidence getScopeConfidence() {
 	 * 
 	 * @return Realm database status. 
 	 */
-	RealmDbStatus getDbStatus() {
+	public RealmDbStatus getDbStatus() {
 		return dbStatus;
 	}	
 
@@ -282,7 +282,7 @@ boolean setSignature(String signature) {
 	/**
 	 * Encapsulates status of realm row.
 	 */
-	enum RealmDbStatus {
+	public enum RealmDbStatus {
 		ACTIVE(0, "Active"),
 		MERGED(1, "Merged"),
 		DELETED(2, "Deleted");	
@@ -295,7 +295,7 @@ enum RealmDbStatus {
 			this.name = name;
 		}
 
-		int getId() {
+		public int getId() {
 			return id;
 		}
 
@@ -303,7 +303,7 @@ String getName() {
 			return name;
 		}
 
-		static RealmDbStatus fromID(int typeId) {
+		public static RealmDbStatus fromID(int typeId) {
 			for (RealmDbStatus type : RealmDbStatus.values()) {
 				if (type.ordinal() == typeId) {
 					return type;
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Report.java b/bindings/java/src/org/sleuthkit/datamodel/Report.java
index 78eceef6c3ff152092c350a1f9d2f34aea89a70a..d5894c6419ce603f07920ec40330d8111d3ee359 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Report.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Report.java
@@ -256,13 +256,13 @@ public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactTyp
 	}
 	
 	@Override
-	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, OsAccount osAccount) throws TskCoreException {
 
 		if (artifactType.getTypeID() != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
 			throw new TskCoreException("Reports can only have keyword hit artifacts.");
 		}
 		
-		return db.getBlackboard().newDataArtifact(artifactType, objectId, this.getDataSource().getId(), attributesList, osAccountId);
+		return db.getBlackboard().newDataArtifact(artifactType, objectId, this.getDataSource().getId(), attributesList, osAccount);
 	}
 	
 	@Override
diff --git a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
index a728ac4e1c045327bd149422eff342e433745448..244e2474966e753e08659d778aa407ca5d2d08ed 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
@@ -249,12 +249,12 @@ void fireTSKEvent(Object event) {
 	private final Map<Long, Content> frequentlyUsedContentMap = new HashMap<>();
 
 	private Examiner cachedCurrentExaminer = null;
-
+	
 	static {
 		Properties p = new Properties(System.getProperties());
-		p.put("com.mchange.v2.log.MLog", "com.mchange.v2.log.FallbackMLog");
-		p.put("com.mchange.v2.log.FallbackMLog.DEFAULT_CUTOFF_LEVEL", "SEVERE");
-		System.setProperties(p);
+        p.put("com.mchange.v2.log.MLog", "com.mchange.v2.log.FallbackMLog");
+        p.put("com.mchange.v2.log.FallbackMLog.DEFAULT_CUTOFF_LEVEL", "SEVERE");
+        System.setProperties(p);
 	}
 
 	/**
@@ -383,8 +383,7 @@ private void init() throws Exception {
 		typeNameToAttributeTypeMap = new ConcurrentHashMap<>();
 
 		/*
-		 * The database schema must be updated before loading blackboard
-		 * artifact/attribute types
+		 * The database schema must be updated before loading blackboard artifact/attribute types
 		 */
 		updateDatabaseSchema(null);
 		initBlackboardArtifactTypes();
@@ -534,7 +533,7 @@ public synchronized TaggingManager getTaggingManager() {
 	public ScoringManager getScoringManager() throws TskCoreException {
 		return scoringManager;
 	}
-
+	
 	/**
 	 * Gets the OS account realm manager for this case.
 	 *
@@ -545,7 +544,7 @@ public ScoringManager getScoringManager() throws TskCoreException {
 	public OsAccountRealmManager getOsAccountRealmManager() throws TskCoreException {
 		return osAccountRealmManager;
 	}
-
+	
 	/**
 	 * Gets the OS account manager for this case.
 	 *
@@ -556,7 +555,7 @@ public OsAccountRealmManager getOsAccountRealmManager() throws TskCoreException
 	public OsAccountManager getOsAccountManager() throws TskCoreException {
 		return osAccountManager;
 	}
-
+	
 	/**
 	 * Gets the Hosts manager for this case.
 	 *
@@ -567,7 +566,7 @@ public OsAccountManager getOsAccountManager() throws TskCoreException {
 	public HostManager getHostManager() throws TskCoreException {
 		return hostManager;
 	}
-
+	
 	/**
 	 * Gets the Person manager for this case.
 	 *
@@ -578,7 +577,7 @@ public HostManager getHostManager() throws TskCoreException {
 	public PersonManager getPersonManager() throws TskCoreException {
 		return personManager;
 	}
-
+		
 	/**
 	 * Gets the HostAddress manager for this case.
 	 *
@@ -588,8 +587,8 @@ public PersonManager getPersonManager() throws TskCoreException {
 	 */
 	public HostAddressManager getHostAddressManager() throws TskCoreException {
 		return hostAddressManager;
-	}
-
+	}	
+	
 	/**
 	 * Make sure the predefined artifact types are in the artifact types table.
 	 *
@@ -1192,26 +1191,26 @@ private CaseDbSchemaVersionNumber updateFromSchema2toSchema3(CaseDbSchemaVersion
 			// Convert existing tag artifact and attribute rows to rows in the new tags tables.
 			Map<String, Long> tagNames = new HashMap<>();
 			long tagNameCounter = 1;
-
+			
 			// Convert file tags.
 			// We need data from the TSK_TAG_NAME and TSK_COMMENT attributes, and need the file size from the tsk_files table.
-			resultSet = statement.executeQuery("SELECT * FROM \n"
-					+ "(SELECT blackboard_artifacts.obj_id AS objId, blackboard_attributes.artifact_id AS artifactId, blackboard_attributes.value_text AS name\n"
-					+ "FROM blackboard_artifacts INNER JOIN blackboard_attributes \n"
-					+ "ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id \n"
-					+ "WHERE blackboard_artifacts.artifact_type_id = "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID()
-					+ " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()
-					+ ") AS tagNames \n"
-					+ "INNER JOIN \n"
-					+ "(SELECT tsk_files.obj_id as objId2, tsk_files.size AS fileSize \n"
-					+ "FROM blackboard_artifacts INNER JOIN tsk_files \n"
-					+ "ON blackboard_artifacts.obj_id = tsk_files.obj_id) AS fileData \n"
-					+ "ON tagNames.objId = fileData.objId2 \n"
-					+ "LEFT JOIN \n"
-					+ "(SELECT value_text AS comment, artifact_id AS tagArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
-					+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID() + ") AS tagComments \n"
-					+ "ON tagNames.artifactId = tagComments.tagArtifactId");
+			resultSet = statement.executeQuery("SELECT * FROM \n" +
+				"(SELECT blackboard_artifacts.obj_id AS objId, blackboard_attributes.artifact_id AS artifactId, blackboard_attributes.value_text AS name\n" +
+				"FROM blackboard_artifacts INNER JOIN blackboard_attributes \n" +
+				"ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id \n" +
+				"WHERE blackboard_artifacts.artifact_type_id = " +
+					BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() + 
+					" AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID() 
+					+ ") AS tagNames \n" +
+				"INNER JOIN \n" +
+				"(SELECT tsk_files.obj_id as objId2, tsk_files.size AS fileSize \n" +
+				"FROM blackboard_artifacts INNER JOIN tsk_files \n" +
+				"ON blackboard_artifacts.obj_id = tsk_files.obj_id) AS fileData \n" +
+				"ON tagNames.objId = fileData.objId2 \n" +
+				"LEFT JOIN \n" +
+				"(SELECT value_text AS comment, artifact_id AS tagArtifactId FROM blackboard_attributes WHERE attribute_type_id = " + 
+					BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID() + ") AS tagComments \n" +
+				"ON tagNames.artifactId = tagComments.tagArtifactId");
 
 			while (resultSet.next()) {
 				long objId = resultSet.getLong("objId");
@@ -1221,46 +1220,46 @@ private CaseDbSchemaVersionNumber updateFromSchema2toSchema3(CaseDbSchemaVersion
 				if (tagComment == null) {
 					tagComment = "";
 				}
-
-				if (tagName != null && !tagName.isEmpty()) {
+				
+				if (tagName != null && ! tagName.isEmpty()) {
 					// Get the index for the tag name, adding it to the database if needed.
 					long tagNameIndex;
 					if (tagNames.containsKey(tagName)) {
 						tagNameIndex = tagNames.get(tagName);
 					} else {
-						statement2.execute("INSERT INTO tag_names (display_name, description, color) "
-								+ "VALUES(\"" + tagName + "\", \"\", \"None\")");
+						statement2.execute("INSERT INTO tag_names (display_name, description, color) " +
+							"VALUES(\"" + tagName + "\", \"\", \"None\")");
 						tagNames.put(tagName, tagNameCounter);
 						tagNameIndex = tagNameCounter;
 						tagNameCounter++;
 					}
-
-					statement2.execute("INSERT INTO content_tags (obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset) "
-							+ "VALUES(" + objId + ", " + tagNameIndex + ", \"" + tagComment + "\", 0, " + fileSize + ")");
+					
+					statement2.execute("INSERT INTO content_tags (obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset) " +
+							"VALUES(" + objId + ", " + tagNameIndex + ", \"" + tagComment + "\", 0, " + fileSize + ")");
 				}
 			}
 			resultSet.close();
-
+			
 			// Convert artifact tags.
 			// We need data from the TSK_TAG_NAME, TSK_TAGGED_ARTIFACT, and TSK_COMMENT attributes.
-			resultSet = statement.executeQuery("SELECT * FROM \n"
-					+ "(SELECT blackboard_artifacts.obj_id AS objId, blackboard_attributes.artifact_id AS artifactId, "
-					+ "blackboard_attributes.value_text AS name\n"
-					+ "FROM blackboard_artifacts INNER JOIN blackboard_attributes \n"
-					+ "ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id \n"
-					+ "WHERE blackboard_artifacts.artifact_type_id = "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()
-					+ " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()
-					+ ") AS tagNames \n"
-					+ "INNER JOIN \n"
-					+ "(SELECT value_int64 AS taggedArtifactId, artifact_id AS associatedArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
-					+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID() + ") AS tagArtifacts \n"
-					+ "ON tagNames.artifactId = tagArtifacts.associatedArtifactId \n"
-					+ "LEFT JOIN \n"
-					+ "(SELECT value_text AS comment, artifact_id AS commentArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
-					+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID() + ") AS tagComments \n"
-					+ "ON tagNames.artifactId = tagComments.commentArtifactId");
-
+			resultSet = statement.executeQuery("SELECT * FROM \n" +
+				"(SELECT blackboard_artifacts.obj_id AS objId, blackboard_attributes.artifact_id AS artifactId, " +
+					"blackboard_attributes.value_text AS name\n" +
+				"FROM blackboard_artifacts INNER JOIN blackboard_attributes \n" +
+				"ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id \n" +
+				"WHERE blackboard_artifacts.artifact_type_id = " +
+					BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID() + 
+					" AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID() 
+					+ ") AS tagNames \n" +
+				"INNER JOIN \n" +
+				"(SELECT value_int64 AS taggedArtifactId, artifact_id AS associatedArtifactId FROM blackboard_attributes WHERE attribute_type_id = " + 
+					BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID() + ") AS tagArtifacts \n" +
+				"ON tagNames.artifactId = tagArtifacts.associatedArtifactId \n" +
+				"LEFT JOIN \n" +
+				"(SELECT value_text AS comment, artifact_id AS commentArtifactId FROM blackboard_attributes WHERE attribute_type_id = " + 
+					BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID() + ") AS tagComments \n" +
+				"ON tagNames.artifactId = tagComments.commentArtifactId");
+			
 			while (resultSet.next()) {
 				long artifactId = resultSet.getLong("taggedArtifactId");
 				String tagName = resultSet.getString("name");
@@ -1268,25 +1267,25 @@ private CaseDbSchemaVersionNumber updateFromSchema2toSchema3(CaseDbSchemaVersion
 				if (tagComment == null) {
 					tagComment = "";
 				}
-				if (tagName != null && !tagName.isEmpty()) {
+				if (tagName != null && ! tagName.isEmpty()) {
 					// Get the index for the tag name, adding it to the database if needed.
 					long tagNameIndex;
 					if (tagNames.containsKey(tagName)) {
 						tagNameIndex = tagNames.get(tagName);
 					} else {
-						statement2.execute("INSERT INTO tag_names (display_name, description, color) "
-								+ "VALUES(\"" + tagName + "\", \"\", \"None\")");
+						statement2.execute("INSERT INTO tag_names (display_name, description, color) " +
+							"VALUES(\"" + tagName + "\", \"\", \"None\")");
 						tagNames.put(tagName, tagNameCounter);
 						tagNameIndex = tagNameCounter;
 						tagNameCounter++;
 					}
-
-					statement2.execute("INSERT INTO blackboard_artifact_tags (artifact_id, tag_name_id, comment) "
-							+ "VALUES(" + artifactId + ", " + tagNameIndex + ", \"" + tagComment + "\")");
+					
+					statement2.execute("INSERT INTO blackboard_artifact_tags (artifact_id, tag_name_id, comment) " +
+							"VALUES(" + artifactId + ", " + tagNameIndex + ", \"" + tagComment + "\")");
 				}
-			}
+			}			
 			resultSet.close();
-
+			
 			statement.execute(
 					"DELETE FROM blackboard_attributes WHERE artifact_id IN " //NON-NLS
 					+ "(SELECT artifact_id FROM blackboard_artifacts WHERE artifact_type_id = " //NON-NLS
@@ -2271,12 +2270,10 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot4toSchema8dot5(CaseDbSchem
 
 			statement.execute("ALTER TABLE tag_names ADD COLUMN rank INTEGER");
 
-			/*
-			 * Update existing Project Vic tag names (from Image Gallery in
-			 * Autopsy) to be part of a Tag Set. NOTE: These names are out of
-			 * date and will not work with the Project VIC Report module. New
-			 * cases will get the new names from Image Gallery.
-			 */
+			/* Update existing Project Vic tag names (from Image Gallery in Autopsy) 
+			 * to be part of a Tag Set. 
+			 * NOTE: These names are out of date and will not work with the Project VIC 
+			 * Report module. New cases will get the new names from Image Gallery. */
 			String insertStmt = "INSERT INTO tsk_tag_sets (name) VALUES ('Project VIC')";
 			if (getDatabaseType() == DbType.POSTGRESQL) {
 				statement.execute(insertStmt, Statement.RETURN_GENERATED_KEYS);
@@ -2336,7 +2333,7 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot4toSchema8dot5(CaseDbSchem
 			releaseSingleUserCaseWriteLock();
 		}
 	}
-
+	
 	private CaseDbSchemaVersionNumber updateFromSchema8dot5toSchema8dot6(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
 		if (schemaVersion.getMajor() != 8) {
 			return schemaVersion;
@@ -2357,7 +2354,7 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot5toSchema8dot6(CaseDbSchem
 			closeStatement(statement);
 			releaseSingleUserCaseWriteLock();
 		}
-	}
+	}	
 
 	@SuppressWarnings("deprecation")
 	private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
@@ -2372,48 +2369,48 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchem
 		Statement statement = connection.createStatement();
 		acquireSingleUserCaseWriteLock();
 		try {
-			String dateDataType = "BIGINT";
+			String dateDataType   = "BIGINT";
 			String bigIntDataType = "BIGINT";
-			String blobDataType = "BYTEA";
+			String blobDataType   = "BYTEA";
 			String primaryKeyType = "BIGSERIAL";
 
 			if (this.dbType.equals(DbType.SQLITE)) {
-				dateDataType = "INTEGER";
+				dateDataType   = "INTEGER";
 				bigIntDataType = "INTEGER";
-				blobDataType = "BLOB";
+				blobDataType   = "BLOB";
 				primaryKeyType = "INTEGER";
 			}
-			statement.execute("ALTER TABLE data_source_info ADD COLUMN added_date_time " + dateDataType);
+			statement.execute("ALTER TABLE data_source_info ADD COLUMN added_date_time "+ dateDataType );
 			statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_settings TEXT");
 			statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_name TEXT");
 			statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_version TEXT");
-
+			
 			// Add category type and initialize the types. We use the list of artifact types that
 			// were categorized as analysis results as of the 8.7 update to ensure consistency in
 			// case the built-in types change in a later release.
 			statement.execute("ALTER TABLE blackboard_artifact_types ADD COLUMN category_type INTEGER DEFAULT 0");
-			String analysisTypeObjIdList
-					= BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_SUSPECTED.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_OBJECT_DETECTED.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_USER_CONTENT_SUSPECTED.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_ACCOUNT_TYPE.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_YARA_HIT.getTypeID() + ", "
-					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CATEGORIZATION.getTypeID();
+			String analysisTypeObjIdList = 
+				BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + ", " 
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_SUSPECTED.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_OBJECT_DETECTED.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_USER_CONTENT_SUSPECTED.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_ACCOUNT_TYPE.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_YARA_HIT.getTypeID() + ", "
+				+ BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CATEGORIZATION.getTypeID();
 			statement.execute("UPDATE blackboard_artifact_types SET category_type = " + BlackboardArtifact.Category.ANALYSIS_RESULT.getID()
 					+ " WHERE artifact_type_id IN (" + analysisTypeObjIdList + ")");
 
-			// Create tsk file attributes table
+            // Create tsk file attributes table
 			statement.execute("CREATE TABLE tsk_file_attributes (id " + primaryKeyType + " PRIMARY KEY, "
 					+ "obj_id " + bigIntDataType + " NOT NULL, "
 					+ "attribute_type_id " + bigIntDataType + " NOT NULL, "
@@ -2444,7 +2441,7 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchem
 			statement.execute("CREATE TABLE tsk_persons (id " + primaryKeyType + " PRIMARY KEY, "
 					+ "name TEXT NOT NULL, " // person name
 					+ "UNIQUE(name)) ");
-
+			
 			// Create host table.
 			statement.execute("CREATE TABLE tsk_hosts (id " + primaryKeyType + " PRIMARY KEY, "
 					+ "name TEXT NOT NULL, " // host name
@@ -2457,16 +2454,16 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchem
 
 			// Create OS Account and related tables 
 			statement.execute("CREATE TABLE tsk_os_account_realms (id " + primaryKeyType + " PRIMARY KEY, "
-					+ "realm_name TEXT DEFAULT NULL, " // realm name - for a domain realm, may be null
-					+ "realm_addr TEXT DEFAULT NULL, " // a sid/uid or some some other identifier, may be null
-					+ "realm_signature TEXT NOT NULL, " // Signature exists only to prevent duplicates. It is  made up of realm address/name and scope host
-					+ "scope_host_id " + bigIntDataType + " DEFAULT NULL, " // if the realm scope is a single host
-					+ "scope_confidence INTEGER, " // indicates whether we know for sure the realm scope or if we are inferring it
-					+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
-					+ "merged_into " + bigIntDataType + " DEFAULT NULL, "
-					+ "UNIQUE(realm_signature), "
-					+ "FOREIGN KEY(scope_host_id) REFERENCES tsk_hosts(id),"
-					+ "FOREIGN KEY(merged_into) REFERENCES tsk_os_account_realms(id) )");
+				+ "realm_name TEXT DEFAULT NULL, "	// realm name - for a domain realm, may be null
+				+ "realm_addr TEXT DEFAULT NULL, "		// a sid/uid or some some other identifier, may be null
+				+ "realm_signature TEXT NOT NULL, "		// Signature exists only to prevent duplicates. It is  made up of realm address/name and scope host
+				+ "scope_host_id " + bigIntDataType + " DEFAULT NULL, " // if the realm scope is a single host
+				+ "scope_confidence INTEGER, "	// indicates whether we know for sure the realm scope or if we are inferring it
+				+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
+				+ "merged_into " + bigIntDataType + " DEFAULT NULL, "	
+				+ "UNIQUE(realm_signature), "
+				+ "FOREIGN KEY(scope_host_id) REFERENCES tsk_hosts(id),"
+				+ "FOREIGN KEY(merged_into) REFERENCES tsk_os_account_realms(id) )");
 
 			// Add host column and create a host for each existing data source.
 			// We will create a host for each device id so that related data sources will 
@@ -2479,8 +2476,8 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchem
 				while (resultSet.next()) {
 					long objId = resultSet.getLong("obj_id");
 					String deviceId = resultSet.getString("device_id");
-
-					if (!hostMap.containsKey(deviceId)) {
+					
+					if (! hostMap.containsKey(deviceId)) {
 						String hostName = "Host " + hostIndex;
 						updateStatement.execute("INSERT INTO tsk_hosts (name, db_status) VALUES ('" + hostName + "', 0)");
 						hostMap.put(deviceId, hostIndex);
@@ -2491,7 +2488,7 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchem
 			} finally {
 				closeStatement(updateStatement);
 			}
-
+			
 			statement.execute("CREATE TABLE tsk_os_accounts (os_account_obj_id " + bigIntDataType + " PRIMARY KEY, "
 					+ "login_name TEXT DEFAULT NULL, " // login name, if available, may be null
 					+ "full_name TEXT DEFAULT NULL, " // full name, if available, may be null
@@ -2502,11 +2499,11 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchem
 					+ "type INTEGER, " // service/interactive
 					+ "created_date " + bigIntDataType + " DEFAULT NULL, "
 					+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
-					+ "merged_into " + bigIntDataType + " DEFAULT NULL, "
+			        + "merged_into " + bigIntDataType + " DEFAULT NULL, "
 					+ "UNIQUE(signature, realm_id), "
 					+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
 					+ "FOREIGN KEY(realm_id) REFERENCES tsk_os_account_realms(id),"
-					+ "FOREIGN KEY(merged_into) REFERENCES tsk_os_accounts(os_account_obj_id) )");
+				    + "FOREIGN KEY(merged_into) REFERENCES tsk_os_accounts(os_account_obj_id) )");
 
 			statement.execute("CREATE TABLE tsk_os_account_attributes (id " + primaryKeyType + " PRIMARY KEY, "
 					+ "os_account_obj_id " + bigIntDataType + " NOT NULL, "
@@ -2540,6 +2537,7 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchem
 			statement.execute("ALTER TABLE tsk_files ADD COLUMN owner_uid TEXT DEFAULT NULL");
 			statement.execute("ALTER TABLE tsk_files ADD COLUMN os_account_obj_id " + bigIntDataType + " DEFAULT NULL REFERENCES tsk_os_accounts(os_account_obj_id) ");
 
+			
 			// create host address tables
 			statement.execute("CREATE TABLE tsk_host_addresses (id " + primaryKeyType + " PRIMARY KEY, "
 					+ "address_type INTEGER NOT NULL, "
@@ -2574,8 +2572,10 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchem
 					+ "data_source_obj_id " + bigIntDataType + " NOT NULL, " // data source where the usage was found
 					+ "UNIQUE(addr_obj_id, obj_id), "
 					+ "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, "
+							
 					+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE )");
-
+		
+		
 			return new CaseDbSchemaVersionNumber(9, 0);
 
 		} finally {
@@ -2718,17 +2718,17 @@ public String getBackupDatabasePath() {
 	 * that is returned can be passed to methods that take a CaseDbTransaction.
 	 * The caller is responsible for calling either commit() or rollback() on
 	 * the transaction object.
-	 *
+	 * 
 	 * Note that this beginning the transaction also acquires the single user
-	 * case write lock, which will be automatically released when the
-	 * transaction is closed.
+	 * case write lock, which will be automatically released when the transaction
+	 * is closed.
 	 *
 	 * @return A CaseDbTransaction object.
 	 *
 	 * @throws TskCoreException
 	 */
 	public CaseDbTransaction beginTransaction() throws TskCoreException {
-		return new CaseDbTransaction(this);
+		return new CaseDbTransaction(this, connections.getConnection());
 	}
 
 	/**
@@ -3362,7 +3362,7 @@ public DataSource getDataSource(long objectId) throws TskDataException, TskCoreE
 					String sha256 = resultSet.getString("sha256");
 					String name = resultSet.getString("display_name");
 
-					List<String> imagePaths = getImagePathsById(objectId, connection);
+					List<String> imagePaths = getImagePathsById(objectId);
 					if (name == null) {
 						if (imagePaths.size() > 0) {
 							String path = imagePaths.get(0);
@@ -3543,7 +3543,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());	 //NON-NLS
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), 
 						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
@@ -3559,6 +3559,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 		}
 	}
 
+	
 	/**
 	 * Get all blackboard artifacts that have an attribute of the given type and
 	 * String value. Does not included rejected artifacts.
@@ -3599,7 +3600,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), 
 						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
@@ -4255,8 +4256,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(ARTIFACT_TYPE artifactTyp
 	}
 
 	/**
-	 * Get the blackboard artifact with the given artifact id (artifact_id in
-	 * blackboard_artifacts)
+	 * Get the blackboard artifact with the given artifact id (artifact_id in blackboard_artifacts)
 	 *
 	 * @param artifactID artifact ID (artifact_id column)
 	 *
@@ -4394,10 +4394,10 @@ void addBlackBoardAttribute(BlackboardAttribute attr, int artifactTypeId, CaseDb
 		statement.setLong(6, attr.getAttributeType().getValueType().getType());
 		connection.executeUpdate(statement);
 	}
-
+	
 	void addFileAttribute(Attribute attr, CaseDbConnection connection) throws SQLException, TskCoreException {
-		PreparedStatement statement;
-		statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE_ATTRIBUTE, Statement.RETURN_GENERATED_KEYS);
+		PreparedStatement statement; 
+		statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE_ATTRIBUTE, Statement.RETURN_GENERATED_KEYS); 
 		statement.clearParameters();
 
 		statement.setLong(1, attr.getAttributeParentId());
@@ -4421,29 +4421,29 @@ void addFileAttribute(Attribute attr, CaseDbConnection connection) throws SQLExc
 		} else {
 			statement.setNull(6, java.sql.Types.INTEGER);
 		}
-
+ 
 		if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME
 				|| attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG) {
 			statement.setLong(7, attr.getValueLong());
 		} else {
 			statement.setNull(7, java.sql.Types.BIGINT);
 		}
-
+		
 		if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) {
 			statement.setDouble(8, attr.getValueDouble());
 		} else {
 			statement.setNull(8, java.sql.Types.DOUBLE);
 		}
-
-		connection.executeUpdate(statement);
+ 
+		connection.executeUpdate(statement);		
 		try (ResultSet resultSet = statement.getGeneratedKeys()) {
-			if (!resultSet.next()) {
+			if(!resultSet.next()) {
 				throw new TskCoreException(String.format("Failed to insert file attribute "
 						+ "with id=%d. The expected key was not generated", attr.getId()));
 			}
-
+			
 			attr.setId(resultSet.getLong(1));
-		}
+		}		
 	}
 
 	/**
@@ -4781,9 +4781,8 @@ BlackboardArtifact.Type getArtifactType(int artTypeId) throws TskCoreException {
 
 	/**
 	 * Add an artifact type with the given name. Will return an artifact Type.
-	 *
-	 * This assumes that the artifact type being added has the category
-	 * DATA_ARTIFACT.
+	 * 
+	 * This assumes that the artifact type being added has the category DATA_ARTIFACT.
 	 *
 	 * @param artifactTypeName System (unique) name of artifact
 	 * @param displayName      Display (non-unique) name of artifact
@@ -4795,18 +4794,17 @@ BlackboardArtifact.Type getArtifactType(int artTypeId) throws TskCoreException {
 	 *                          within tsk core
 	 */
 	public BlackboardArtifact.Type addBlackboardArtifactType(String artifactTypeName, String displayName) throws TskCoreException, TskDataException {
-
+		
 		return addBlackboardArtifactType(artifactTypeName, displayName, BlackboardArtifact.Category.DATA_ARTIFACT);
 	}
 
 	/**
-	 * Add an artifact type with the given name and category. Will return an
-	 * artifact Type.
+	 * Add an artifact type with the given name and category. Will return an artifact Type.
 	 *
 	 * @param artifactTypeName System (unique) name of artifact
 	 * @param displayName      Display (non-unique) name of artifact
-	 * @param category		       Artifact type category.
-	 *
+	 * @param category		   Artifact type category.
+	 * 
 	 *
 	 * @return Type of the artifact added.
 	 *
@@ -4835,7 +4833,7 @@ BlackboardArtifact.Type addBlackboardArtifactType(String artifactTypeName, Strin
 						maxID++;
 					}
 				}
-				connection.executeUpdate(s, "INSERT INTO blackboard_artifact_types (artifact_type_id, type_name, display_name, category_type) VALUES ('" + maxID + "', '" + artifactTypeName + "', '" + displayName + "', " + category.getID() + " )"); //NON-NLS
+				connection.executeUpdate(s, "INSERT INTO blackboard_artifact_types (artifact_type_id, type_name, display_name, category_type) VALUES ('" + maxID + "', '" + artifactTypeName + "', '" + displayName +  "', " + category.getID() + " )"); //NON-NLS
 				BlackboardArtifact.Type type = new BlackboardArtifact.Type(maxID, artifactTypeName, displayName, category);
 				this.typeIdToArtifactTypeMap.put(type.getTypeID(), type);
 				this.typeNameToArtifactTypeMap.put(type.getTypeName(), type);
@@ -4910,12 +4908,9 @@ public ArrayList<BlackboardAttribute> getBlackboardAttributes(final BlackboardAr
 
 	/**
 	 * Get the attributes associated with the given file.
-	 *
 	 * @param file
-	 *
 	 * @return
-	 *
-	 * @throws TskCoreException
+	 * @throws TskCoreException 
 	 */
 	ArrayList<Attribute> getFileAttributes(final AbstractFile file) throws TskCoreException {
 		CaseDbConnection connection = connections.getConnection();
@@ -5106,11 +5101,9 @@ public BlackboardArtifact newBlackboardArtifact(ARTIFACT_TYPE artifactType, long
 	/**
 	 * Add a new blackboard artifact with the given type.
 	 *
-	 * @param artifactType       the type the given artifact should have
-	 * @param obj_id             the content object id associated with this
-	 *                           artifact
-	 * @param data_source_obj_id The data source obj id associated with this
-	 *                           artifact
+	 * @param artifactType the type the given artifact should have
+	 * @param obj_id       the content object id associated with this artifact
+	 * @param data_source_obj_id The data source obj id associated with this artifact
 	 *
 	 * @return a new blackboard artifact
 	 *
@@ -5123,7 +5116,7 @@ BlackboardArtifact newBlackboardArtifact(int artifactTypeID, long obj_id, long d
 			return newBlackboardArtifact(artifactTypeID, obj_id, type.getTypeName(), type.getDisplayName(), data_source_obj_id, connection);
 		}
 	}
-
+	
 	private BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_id, String artifactTypeName, String artifactDisplayName) throws TskCoreException {
 		try (CaseDbConnection connection = connections.getConnection()) {
 			long data_source_obj_id = getDataSourceObjectId(connection, obj_id);
@@ -5131,36 +5124,36 @@ private BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_
 		}
 	}
 
-	PreparedStatement createInsertArtifactStatement(int artifact_type_id, long obj_id, long artifact_obj_id, long data_source_obj_id, CaseDbConnection connection) throws TskCoreException, SQLException {
-
-		PreparedStatement statement;
-		if (dbType == DbType.POSTGRESQL) {
-			statement = connection.getPreparedStatement(PREPARED_STATEMENT.POSTGRESQL_INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS);
-			statement.clearParameters();
-			statement.setLong(1, obj_id);
-			statement.setLong(2, artifact_obj_id);
-			statement.setLong(3, data_source_obj_id);
-			statement.setInt(4, artifact_type_id);
-		} else {
-			statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS);
-			statement.clearParameters();
-			this.nextArtifactId++;
-			statement.setLong(1, this.nextArtifactId);
-			statement.setLong(2, obj_id);
-			statement.setLong(3, artifact_obj_id);
-			statement.setLong(4, data_source_obj_id);
-			statement.setInt(5, artifact_type_id);
-		}
-
+	PreparedStatement createInsertArtifactStatement(int artifact_type_id, long obj_id, long artifact_obj_id,   long data_source_obj_id, CaseDbConnection connection) throws TskCoreException, SQLException {
+	
+			PreparedStatement statement;
+			if (dbType == DbType.POSTGRESQL) {
+				statement = connection.getPreparedStatement(PREPARED_STATEMENT.POSTGRESQL_INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS);
+				statement.clearParameters();
+				statement.setLong(1, obj_id);
+				statement.setLong(2, artifact_obj_id);
+				statement.setLong(3, data_source_obj_id);
+				statement.setInt(4, artifact_type_id);
+			} else {
+				statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS);
+				statement.clearParameters();
+				this.nextArtifactId++;
+				statement.setLong(1, this.nextArtifactId);
+				statement.setLong(2, obj_id);
+				statement.setLong(3, artifact_obj_id);
+				statement.setLong(4, data_source_obj_id);
+				statement.setInt(5, artifact_type_id);
+			}
+		
 		return statement;
 	}
-
+	
 	BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_id, String artifactTypeName, String artifactDisplayName, long data_source_obj_id, CaseDbConnection connection) throws TskCoreException {
 		acquireSingleUserCaseWriteLock();
-		try {
+		try  {
 			long artifact_obj_id = addObject(obj_id, TskData.ObjectType.ARTIFACT.getObjectType(), connection);
 			PreparedStatement statement = createInsertArtifactStatement(artifact_type_id, obj_id, artifact_obj_id, data_source_obj_id, connection);
-
+			
 			connection.executeUpdate(statement);
 			try (ResultSet resultSet = statement.getGeneratedKeys()) {
 				resultSet.next();
@@ -5193,11 +5186,11 @@ BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_id, Stri
 	 * @throws TskCoreException
 	 */
 	AnalysisResult newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score, String conclusion, String configuration, String justification, CaseDbConnection connection) throws TskCoreException {
-
+		
 		if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) {
-			throw new TskCoreException(String.format("Artifact type (name = %s) is not of the AnalysisResult category. ", artifactType.getTypeName()));
+			throw new TskCoreException(String.format("Artifact type (name = %s) is not of the AnalysisResult category. ", artifactType.getTypeName()) );
 		}
-
+		
 		long artifactID;
 		acquireSingleUserCaseWriteLock();
 		try {
@@ -5219,7 +5212,7 @@ AnalysisResult newAnalysisResult(BlackboardArtifact.Type artifactType, long objI
 						|| !StringUtils.isBlank(conclusion)
 						|| !StringUtils.isBlank(configuration)
 						|| !StringUtils.isBlank(justification)) {
-
+						
 					PreparedStatement analysisResultsStatement;
 
 					analysisResultsStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_ANALYSIS_RESULT);
@@ -5244,14 +5237,14 @@ AnalysisResult newAnalysisResult(BlackboardArtifact.Type artifactType, long objI
 			} finally {
 				closeResultSet(resultSet);
 			}
-
+		
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error creating a analysis result", ex);
 		} finally {
 			releaseSingleUserCaseWriteLock();
 		}
 	}
-
+	
 	/**
 	 * Checks if the content object has children. Note: this is generally more
 	 * efficient then preloading all children and checking if the set is empty,
@@ -5951,8 +5944,7 @@ AbstractFile getAbstractFileById(long objectId, CaseDbConnection connection) thr
 	/**
 	 * Get artifact from blackboard_artifacts table by its artifact_obj_id
 	 *
-	 * @param id id of the artifact in blackboard_artifacts table
-	 *           (artifact_obj_id column)
+	 * @param id id of the artifact in blackboard_artifacts table (artifact_obj_id column)
 	 *
 	 * @return Artifact object populated, or null if not found.
 	 *
@@ -5960,7 +5952,7 @@ AbstractFile getAbstractFileById(long objectId, CaseDbConnection connection) thr
 	 *                          core and file could not be queried
 	 */
 	public BlackboardArtifact getArtifactById(long id) throws TskCoreException {
-
+		
 		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseReadLock();
 		ResultSet rs = null;
@@ -5969,12 +5961,12 @@ public BlackboardArtifact getArtifactById(long id) throws TskCoreException {
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_TYPE_BY_ARTIFACT_OBJ_ID);
 			statement.clearParameters();
 			statement.setLong(1, id);
-
+			
 			rs = connection.executeQuery(statement);
 			if (!rs.next()) {
 				throw new TskCoreException("Error getting artifacttype for artifact with artifact_obj_id = " + id);
 			}
-
+		
 			// based on the artifact type category, get the analysis result or the data artifact
 			BlackboardArtifact.Type artifactType = getArtifactType(rs.getInt("artifact_type_id"));
 			switch (artifactType.getCategory()) {
@@ -5985,7 +5977,7 @@ public BlackboardArtifact getArtifactById(long id) throws TskCoreException {
 				default:
 					throw new TskCoreException(String.format("Unknown artifact category for artifact with artifact_obj_id = %d, and artifact type = %s", id, artifactType.getTypeName()));
 			}
-
+			
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting artifacts by artifact_obj_id, artifact_obj_id = " + id, ex);
 		} finally {
@@ -6348,12 +6340,13 @@ public VirtualDirectory addVirtualDirectory(long parentId, String directoryName,
 
 			//extension, since this is not really file we just set it to null
 			statement.setString(21, null);
-
+			
 			statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
 			statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
-
+			
 			connection.executeUpdate(statement);
 
+			
 			return new VirtualDirectory(this, newObjId, dataSourceObjectId, directoryName, dirType,
 					metaType, dirFlag, metaFlags, null, null, FileKnown.UNKNOWN,
 					parentPath);
@@ -6486,7 +6479,7 @@ public LocalDirectory addLocalDirectory(long parentId, String directoryName, Cas
 
 			statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
 			statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
-
+			
 			connection.executeUpdate(statement);
 
 			return new LocalDirectory(this, newObjId, dataSourceObjectId, directoryName, dirType,
@@ -6521,7 +6514,7 @@ public LocalDirectory addLocalDirectory(long parentId, String directoryName, Cas
 	public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String rootDirectoryName, String timeZone, CaseDbTransaction transaction) throws TskCoreException {
 		return addLocalFilesDataSource(deviceId, rootDirectoryName, timeZone, null, transaction);
 	}
-
+	
 	/**
 	 * Adds a local/logical files and/or directories data source.
 	 *
@@ -6548,7 +6541,7 @@ public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String root
 		Statement statement = null;
 		try {
 			CaseDbConnection connection = transaction.getConnection();
-
+			
 			// Insert a row for the root virtual directory of the data source
 			// into the tsk_objects table.
 			long newObjId = addObject(0, TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);
@@ -6556,8 +6549,8 @@ public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String root
 			// If no host was supplied, make one
 			if (host == null) {
 				host = getHostManager().newHost("LogicalFileSet_" + newObjId + " Host", transaction);
-			}
-
+			}			
+			
 			// Insert a row for the virtual directory of the data source into
 			// the data_source_info table.
 			statement = connection.createStatement();
@@ -6638,8 +6631,8 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 			String deviceId,
 			CaseDbTransaction transaction) throws TskCoreException {
 		return addImage(type, sectorSize, size, displayName, imagePaths, timezone, md5, sha1, sha256, deviceId, null, transaction);
-	}
-
+	}	
+	
 	/**
 	 * Add an image to the database.
 	 *
@@ -6697,7 +6690,7 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 				preparedStatement.setLong(3, i);
 				connection.executeUpdate(preparedStatement);
 			}
-
+			
 			// Create the display name
 			String name = displayName;
 			if (name == null || name.isEmpty()) {
@@ -6708,7 +6701,7 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 					name = "";
 				}
 			}
-
+			
 			// Create a host if needed
 			if (host == null) {
 				if (name.isEmpty()) {
@@ -6963,7 +6956,7 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size,
 			long ctime, long crtime, long atime, long mtime,
 			boolean isFile, Content parent) throws TskCoreException {
-
+		
 		CaseDbTransaction transaction = beginTransaction();
 		try {
 
@@ -6972,7 +6965,7 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 					ctime, crtime, atime, mtime, null, null, null, isFile, parent,
 					OsAccount.NO_OWNER_ID, null,
 					Collections.emptyList(), transaction);
-
+			
 			transaction.commit();
 			transaction = null;
 			return fileSystemFile;
@@ -7032,7 +7025,7 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 			long ctime, long crtime, long atime, long mtime,
 			String md5Hash, String sha256Hash, String mimeType,
 			boolean isFile, Content parent, String ownerUid,
-			OsAccount osAccount, List<Attribute> fileAttributes,
+			OsAccount osAccount, List<Attribute> fileAttributes, 
 			CaseDbTransaction transaction) throws TskCoreException {
 
 		TimelineManager timelineManager = getTimelineManager();
@@ -7056,7 +7049,7 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 			} else {
 				parentPath = "/";
 			}
-
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE_SYSTEM_FILE);
 			statement.clearParameters();
 			statement.setLong(1, objectId);											// obj_is
@@ -7100,17 +7093,17 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 					size, ctime, crtime, atime, mtime, md5Hash, sha256Hash, null, parentPath, null, parent.getId(), mimeType, null, extension, ownerUid, osAccountId);
 
 			timelineManager.addEventsForNewFile(derivedFile, connection);
-
+			
 			for (Attribute fileAttribute : fileAttributes) {
 				fileAttribute.setAttributeParentId(objectId);
 				fileAttribute.setCaseDatabase(this);
 				addFileAttribute(fileAttribute, connection);
 			}
 
-			if (osAccount != null) {
+			if(osAccount != null) {
 				osAccountManager.newOsAccountInstance(osAccount, dataSourceObjId, OsAccountInstance.OsAccountInstanceType.LAUNCHED, connection);
 			}
-
+			
 			return new org.sleuthkit.datamodel.File(this, objectId, dataSourceObjId, fsObjId,
 					attrType, attrId, fileName, metaAddr, metaSeq,
 					dirType, metaType, dirFlag, metaFlags,
@@ -7122,7 +7115,7 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 			throw new TskCoreException(String.format("Failed to INSERT file system file %s (%s) with parent id %d in tsk_files table", fileName, parentPath, parent.getId()), ex);
 		} finally {
 			closeStatement(queryStatement);
-		}
+		} 
 	}
 
 	/**
@@ -7204,9 +7197,8 @@ public final List<LayoutFile> addLayoutFiles(Content parent, List<TskFileRange>
 				 * INSERT INTO tsk_files (obj_id, fs_obj_id, name, type,
 				 * has_path, dir_type, meta_type, dir_flags, meta_flags, size,
 				 * ctime, crtime, atime, mtime, md5, known, mime_type,
-				 * parent_path, data_source_obj_id,extension, owner_uid,
-				 * os_account_obj_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
-				 * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?)
+				 * parent_path, data_source_obj_id,extension, owner_uid, os_account_obj_id) VALUES (?, ?, ?,
+				 * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?)
 				 */
 				PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 				prepStmt.clearParameters();
@@ -7233,10 +7225,10 @@ public final List<LayoutFile> addLayoutFiles(Content parent, List<TskFileRange>
 
 				//extension, since this is not a FS file we just set it to null
 				prepStmt.setString(21, null);
-
+				
 				prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
 				prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
-
+				
 				connection.executeUpdate(prepStmt);
 
 				/*
@@ -7392,9 +7384,8 @@ public final List<LayoutFile> addCarvedFiles(CarvingResult carvingResult) throws
 				 * INSERT INTO tsk_files (obj_id, fs_obj_id, name, type,
 				 * has_path, dir_type, meta_type, dir_flags, meta_flags, size,
 				 * ctime, crtime, atime, mtime, md5, known, mime_type,
-				 * parent_path, data_source_obj_id,extenion, owner_uid,
-				 * os_account_obj_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
-				 * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+				 * parent_path, data_source_obj_id,extension, owner_uid, os_account_obj_id) 
+				 * VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 				 */
 				PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 				prepStmt.clearParameters();
@@ -7423,10 +7414,10 @@ public final List<LayoutFile> addCarvedFiles(CarvingResult carvingResult) throws
 				prepStmt.setString(19, parentPath); // parent path
 				prepStmt.setLong(20, carvedFilesDir.getDataSourceObjectId()); // data_source_obj_id
 				prepStmt.setString(21, extractExtension(carvedFile.getName())); //extension
-
+				
 				prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
 				prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
-
+				
 				connection.executeUpdate(prepStmt);
 
 				/*
@@ -7605,7 +7596,7 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 			final String extension = extractExtension(fileName);
 			//extension
 			statement.setString(21, extension);
-
+			
 			statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
 			statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
 
@@ -7928,7 +7919,7 @@ public LocalFile addLocalFile(String fileName, String localPath,
 					parent.getId(), parentPath,
 					dataSourceObjId,
 					localPath,
-					encodingType, extension,
+					encodingType, extension, 
 					OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT);
 			getTimelineManager().addEventsForNewFile(localFile, connection);
 			return localFile;
@@ -7939,52 +7930,6 @@ public LocalFile addLocalFile(String fileName, String localPath,
 			closeStatement(queryStatement);
 		}
 	}
-	
-	/**
-	 * Utility class to create keys for the cache used in isRootDirectory().
-	 * The dataSourceId must be set but the fileSystemId can be null 
-	 * (for local directories, for example).
-	 */
-	private class RootDirectoryKey {
-        private long dataSourceId;
-        private Long fileSystemId;
-
-        RootDirectoryKey(long dataSourceId, Long fileSystemId) {
-            this.dataSourceId = dataSourceId;
-            this.fileSystemId = fileSystemId;
-        }
-
-        @Override
-        public int hashCode() {
-            int hash = 7;
-			hash = 41 * hash + Objects.hashCode(dataSourceId);
-            hash = 41 * hash + Objects.hashCode(fileSystemId);
-            return hash;
-        }
-
-        @Override
-        public boolean equals(Object obj) {
-            if (this == obj) {
-                return true;
-            }
-            if (obj == null) {
-                return false;
-            }
-            if (getClass() != obj.getClass()) {
-                return false;
-            }
-
-			RootDirectoryKey otherKey = (RootDirectoryKey)obj;
-			if (dataSourceId != otherKey.dataSourceId) {
-				return false;
-			}
-
-			if (fileSystemId != null) {
-				return fileSystemId.equals(otherKey.fileSystemId);
-			}
-			return (otherKey.fileSystemId == null);
-		}
-	}	
 
 	/**
 	 * Utility class to create keys for the cache used in isRootDirectory().
@@ -8045,6 +7990,7 @@ public boolean equals(Object obj) {
 	 * @throws TskCoreException
 	 */
 	private boolean isRootDirectory(AbstractFile file, CaseDbTransaction transaction) throws TskCoreException {	
+
 		// First check if we know the root directory for this data source and optionally 
 		// file system. There is only one root, so if we know it we can simply compare 
 		// this file ID to the known root directory.
@@ -8168,8 +8114,8 @@ public LayoutFile addLayoutFile(String fileName,
 			 * tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type,
 			 * meta_type, dir_flags, meta_flags, size, ctime, crtime, atime,
 			 * mtime, md5, known, mime_type, parent_path,
-			 * data_source_obj_id,extenion, owner_uid, os_account_obj_id) VALUES
-			 * (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+			 * data_source_obj_id,extension, owner_uid, os_account_obj_id) 
+			 * VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 			 */
 			PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 			prepStmt.clearParameters();
@@ -8208,10 +8154,10 @@ public LayoutFile addLayoutFile(String fileName,
 			prepStmt.setLong(20, parent.getDataSource().getId()); // data_source_obj_id
 
 			prepStmt.setString(21, extractExtension(fileName)); 				//extension
-
+			
 			prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
 			prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
-
+			
 			connection.executeUpdate(prepStmt);
 
 			/*
@@ -8437,16 +8383,16 @@ public List<AbstractFile> findAllFilesWhere(String sqlWhereClause) throws TskCor
 			releaseSingleUserCaseReadLock();
 		}
 	}
-
+	
 	/**
 	 * Find and return list of all (abstract) files matching the specific Where
-	 * clause with the give parentId. You need to know the database schema to
+	 * clause with the given parentId. You need to know the database schema to 
 	 * use this, which is outlined on the
 	 * <a href="http://wiki.sleuthkit.org/index.php?title=SQLite_Database_v3_Schema">wiki</a>.
 	 * You should use enums from org.sleuthkit.datamodel.TskData to make the
 	 * queries easier to maintain and understand.
-	 *
-	 * @param parentId       The parentId
+	 * 
+	 * @param parentId The parentId 
 	 * @param sqlWhereClause a SQL where clause appropriate for the desired
 	 *                       files (do not begin the WHERE clause with the word
 	 *                       WHERE!)
@@ -8456,19 +8402,19 @@ public List<AbstractFile> findAllFilesWhere(String sqlWhereClause) throws TskCor
 	 *
 	 * @throws TskCoreException \ref query_database_page
 	 */
-	public List<AbstractFile> findAllFilesInFolderWhere(long parentId, String sqlWhereClause) throws TskCoreException {
-		String queryTemplate = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE par_obj_id = %d AND %s";
-
-		try (CaseDbConnection connection = connections.getConnection()) {
+	public List<AbstractFile> findAllFilesInFolderWhere(long parentId, String sqlWhereClause) throws TskCoreException{
+		String queryTemplate =  "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE par_obj_id = %d AND %s";
+		
+		try(CaseDbConnection connection = connections.getConnection()) {
 			acquireSingleUserCaseReadLock();
-
+			
 			String query = String.format(queryTemplate, parentId, sqlWhereClause);
-			try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, query)) {
+			try(Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, query)) {
 				return resultSetToAbstractFiles(rs, connection);
-			} catch (SQLException ex) {
+			} catch(SQLException ex) {
 				throw new TskCoreException("SQLException thrown when calling 'SleuthkitCase.findAllFilesInFolderWhere(): " + query, ex);
 			}
-		} finally {
+		}finally {
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9292,8 +9238,8 @@ List<Long> getVolumeChildrenIds(Volume vol) throws TskCoreException {
 	 */
 	public Image addImageInfo(long deviceObjId, List<String> imageFilePaths, String timeZone) throws TskCoreException {
 		return addImageInfo(deviceObjId, imageFilePaths, timeZone, null);
-	}
-
+	}	
+	
 	/**
 	 * Adds an image to the case database.
 	 *
@@ -9329,8 +9275,8 @@ public Map<Long, List<String>> getImagePaths() throws TskCoreException {
 		ResultSet rs1 = null;
 		try {
 			s1 = connection.createStatement();
-			rs1 = connection.executeQuery(s1, "SELECT tsk_image_info.obj_id, tsk_image_names.name FROM tsk_image_info "
-					+ "LEFT JOIN tsk_image_names ON tsk_image_info.obj_id = tsk_image_names.obj_id"); //NON-NLS
+			rs1 = connection.executeQuery(s1, "SELECT tsk_image_info.obj_id, tsk_image_names.name FROM tsk_image_info " +
+				"LEFT JOIN tsk_image_names ON tsk_image_info.obj_id = tsk_image_names.obj_id"); //NON-NLS
 			Map<Long, List<String>> imgPaths = new LinkedHashMap<Long, List<String>>();
 			while (rs1.next()) {
 				long obj_id = rs1.getLong("obj_id"); //NON-NLS
@@ -9346,7 +9292,7 @@ public Map<Long, List<String>> getImagePaths() throws TskCoreException {
 					if (name != null) {
 						imagePaths.add(name);
 					}
-				}
+				}				
 			}
 			return imgPaths;
 		} catch (SQLException ex) {
@@ -9363,15 +9309,15 @@ public Map<Long, List<String>> getImagePaths() throws TskCoreException {
 	 * Returns a list of fully qualified file paths based on an image object ID.
 	 *
 	 * @param objectId The object id of the data source.
-	 * @param connection Database connection to use.
 	 *
 	 * @return List of file paths.
 	 *
 	 * @throws TskCoreException Thrown if a critical error occurred within tsk
 	 *                          core
 	 */
-	private List<String> getImagePathsById(long objectId, CaseDbConnection connection) throws TskCoreException {
+	private List<String> getImagePathsById(long objectId) throws TskCoreException {
 		List<String> imagePaths = new ArrayList<String>();
+		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseReadLock();
 		Statement statement = null;
 		ResultSet resultSet = null;
@@ -9386,6 +9332,7 @@ private List<String> getImagePathsById(long objectId, CaseDbConnection connectio
 		} finally {
 			closeResultSet(resultSet);
 			closeStatement(statement);
+			connection.close();
 			releaseSingleUserCaseReadLock();
 		}
 
@@ -9555,12 +9502,12 @@ private List<AbstractFile> resultSetToAbstractFiles(ResultSet rs, CaseDbConnecti
 					if (parentPath == null) {
 						parentPath = "/"; //NON-NLS
 					}
-
+					
 					Long osAccountObjId = rs.getLong("os_account_obj_id");
 					if (rs.wasNull()) {
 						osAccountObjId = null;
 					}
-
+		
 					LayoutFile lf = new LayoutFile(this,
 							rs.getLong("obj_id"), //NON-NLS
 							rs.getLong("data_source_obj_id"),
@@ -9570,9 +9517,9 @@ private List<AbstractFile> resultSetToAbstractFiles(ResultSet rs, CaseDbConnecti
 							TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), rs.getShort("meta_flags"), //NON-NLS
 							rs.getLong("size"), //NON-NLS
 							rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
-							rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), parentPath,
-							rs.getString("mime_type"),
-							rs.getString("owner_uid"), osAccountObjId); //NON-NLS
+							rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), parentPath, 
+							rs.getString("mime_type"), 
+							rs.getString("owner_uid"), osAccountObjId ); //NON-NLS
 					results.add(lf);
 				} else if (type == TSK_DB_FILES_TYPE_ENUM.DERIVED.getFileType()) {
 					final DerivedFile df;
@@ -9611,7 +9558,7 @@ org.sleuthkit.datamodel.File file(ResultSet rs, FileSystem fs) throws SQLExcepti
 		if (rs.wasNull()) {
 			osAccountObjId = null;
 		}
-
+				
 		org.sleuthkit.datamodel.File f = new org.sleuthkit.datamodel.File(this, rs.getLong("obj_id"), //NON-NLS
 				rs.getLong("data_source_obj_id"), rs.getLong("fs_obj_id"), //NON-NLS
 				TskData.TSK_FS_ATTR_TYPE_ENUM.valueOf(rs.getShort("attr_type")), //NON-NLS
@@ -9644,7 +9591,7 @@ Directory directory(ResultSet rs, FileSystem fs) throws SQLException {
 		if (rs.wasNull()) {
 			osAccountObjId = null;
 		}
-
+		
 		Directory dir = new Directory(this, rs.getLong("obj_id"), rs.getLong("data_source_obj_id"), rs.getLong("fs_obj_id"), //NON-NLS
 				TskData.TSK_FS_ATTR_TYPE_ENUM.valueOf(rs.getShort("attr_type")), //NON-NLS
 				rs.getInt("attr_id"), rs.getString("name"), rs.getLong("meta_addr"), rs.getInt("meta_seq"), //NON-NLS
@@ -9792,12 +9739,12 @@ private DerivedFile derivedFile(ResultSet rs, CaseDbConnection connection, long
 		if (parentPath == null) {
 			parentPath = "";
 		}
-
+		
 		Long osAccountObjId = rs.getLong("os_account_obj_id");
 		if (rs.wasNull()) {
 			osAccountObjId = null;
 		}
-
+				
 		final DerivedFile df = new DerivedFile(this, objId, rs.getLong("data_source_obj_id"),
 				rs.getString("name"), //NON-NLS
 				TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
@@ -9807,7 +9754,7 @@ private DerivedFile derivedFile(ResultSet rs, CaseDbConnection connection, long
 				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
 				rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
 				parentPath, localPath, parentId, rs.getString("mime_type"),
-				encodingType, rs.getString("extension"),
+				encodingType, rs.getString("extension"), 
 				rs.getString("owner_uid"), osAccountObjId);
 		return df;
 	}
@@ -9856,7 +9803,7 @@ private LocalFile localFile(ResultSet rs, CaseDbConnection connection, long pare
 		if (rs.wasNull()) {
 			osAccountObjId = null;
 		}
-
+		
 		LocalFile file = new LocalFile(this, objId, rs.getString("name"), //NON-NLS
 				TSK_DB_FILES_TYPE_ENUM.valueOf(rs.getShort("type")), //NON-NLS
 				TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
@@ -9898,7 +9845,7 @@ org.sleuthkit.datamodel.SlackFile slackFile(ResultSet rs, FileSystem fs) throws
 				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
 				(short) rs.getInt("mode"), rs.getInt("uid"), rs.getInt("gid"), //NON-NLS
 				rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
-				rs.getString("parent_path"), rs.getString("mime_type"), rs.getString("extension"),
+				rs.getString("parent_path"), rs.getString("mime_type"), rs.getString("extension"), 
 				rs.getString("owner_uid"), osAccountObjId); //NON-NLS
 		f.setFileSystem(fs);
 		return f;
@@ -9965,7 +9912,7 @@ List<Content> fileChildren(ResultSet rs, CaseDbConnection connection, long paren
 								rs.getLong("size"),
 								rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"),
 								rs.getString("md5"), rs.getString("sha256"),
-								FileKnown.valueOf(rs.getByte("known")), parentPath, rs.getString("mime_type"),
+								FileKnown.valueOf(rs.getByte("known")), parentPath, rs.getString("mime_type"), 
 								rs.getString("owner_uid"), osAccountObjId);
 						children.add(lf);
 						break;
@@ -10090,10 +10037,9 @@ CaseDbConnection getConnection() throws TskCoreException {
 
 	/**
 	 * Gets the string used to identify this case in the JNI cache.
-	 *
+	 * 
 	 * @return The string for this case
-	 *
-	 * @throws TskCoreException
+	 * @throws TskCoreException 
 	 */
 	String getCaseHandleIdentifier() {
 		return caseHandleIdentifier;
@@ -10224,6 +10170,8 @@ void setImageName(String name, long objId) throws TskCoreException {
 		}
 	}
 
+
+
 	/**
 	 * Updates the image's total size and sector size.This function may be used
 	 * to update the sizes after the image was created.
@@ -10249,7 +10197,7 @@ void setImageSizes(Image image, long totalSize, long sectorSize) throws TskCoreE
 			preparedStatement.setLong(3, image.getId());
 			connection.executeUpdate(preparedStatement);
 		} catch (SQLException ex) {
-			throw new TskCoreException(String.format("Error updating image sizes to %d and sector size to %d for object ID %d ", totalSize, sectorSize, image.getId()), ex);
+			throw new TskCoreException(String.format("Error updating image sizes to %d and sector size to %d for object ID %d ",totalSize, sectorSize, image.getId()), ex);
 		} finally {
 			releaseSingleUserCaseWriteLock();
 		}
@@ -10284,8 +10232,8 @@ public void setFileMIMEType(AbstractFile file, String mimeType) throws TskCoreEx
 	}
 
 	/**
-	 * Sets the unalloc meta flags for the file in the case database, and
-	 * updates the meta flags in given file object. Also updates the dir flag to
+	 * Sets the unalloc meta flags for the file in the case database, and updates
+	 * the meta flags in given file object. Also updates the dir flag to
 	 * unalloc.
 	 *
 	 * @param file A file.
@@ -10294,16 +10242,16 @@ public void setFileMIMEType(AbstractFile file, String mimeType) throws TskCoreEx
 	 * @throws TskCoreException If there is an error updating the case database.
 	 */
 	public void setFileUnalloc(AbstractFile file) throws TskCoreException {
-
+		
 		// get the flags, reset the ALLOC flag, and set the UNALLOC flag
 		short metaFlag = file.getMetaFlagsAsInt();
 		Set<TSK_FS_META_FLAG_ENUM> metaFlagAsSet = TSK_FS_META_FLAG_ENUM.valuesOf(metaFlag);
 		metaFlagAsSet.remove(TSK_FS_META_FLAG_ENUM.ALLOC);
 		metaFlagAsSet.add(TSK_FS_META_FLAG_ENUM.UNALLOC);
-
+		
 		short newMetaFlgs = TSK_FS_META_FLAG_ENUM.toInt(metaFlagAsSet);
 		short newDirFlags = TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue();
-
+		 
 		CaseDbConnection connection = connections.getConnection();
 		Statement statement = null;
 		ResultSet rs = null;
@@ -10311,12 +10259,12 @@ public void setFileUnalloc(AbstractFile file) throws TskCoreException {
 		try {
 			statement = connection.createStatement();
 			connection.executeUpdate(statement, String.format("UPDATE tsk_files SET meta_flags = '%d', dir_flags = '%d'  WHERE obj_id = %d", newMetaFlgs, newDirFlags, file.getId()));
-
+			
 			file.removeMetaFlag(TSK_FS_META_FLAG_ENUM.ALLOC);
 			file.setMetaFlag(TSK_FS_META_FLAG_ENUM.UNALLOC);
-
+			
 			file.setDirFlag(TSK_FS_NAME_FLAG_ENUM.UNALLOC);
-
+			
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error setting unalloc meta flag for file (obj_id = %s)", file.getId()), ex);
 		} finally {
@@ -10326,7 +10274,7 @@ public void setFileUnalloc(AbstractFile file) throws TskCoreException {
 			releaseSingleUserCaseWriteLock();
 		}
 	}
-
+	
 	/**
 	 * Store the md5Hash for the file in the database
 	 *
@@ -10577,6 +10525,7 @@ void setAcquisitionDetails(DataSource datasource, String details) throws TskCore
 		}
 	}
 
+
 	/**
 	 * Sets the acquisition tool details such as its name, version number and
 	 * any settings used during the acquisition to acquire data.
@@ -10611,12 +10560,12 @@ void setAcquisitionToolDetails(DataSource datasource, String name, String versio
 
 	/**
 	 * Set the acquisition details in the data_source_info table.
-	 *
+	 * 
 	 * @param dataSourceId The data source ID.
 	 * @param details      The acquisition details.
 	 * @param trans        The current transaction.
-	 *
-	 * @throws TskCoreException
+	 * 
+	 * @throws TskCoreException 
 	 */
 	void setAcquisitionDetails(long dataSourceId, String details, CaseDbTransaction trans) throws TskCoreException {
 		acquireSingleUserCaseWriteLock();
@@ -10633,7 +10582,7 @@ void setAcquisitionDetails(long dataSourceId, String details, CaseDbTransaction
 			releaseSingleUserCaseWriteLock();
 		}
 	}
-
+	
 	/**
 	 * Get the acquisition details from the data_source_info table
 	 *
@@ -10668,14 +10617,12 @@ String getAcquisitionDetails(DataSource datasource) throws TskCoreException {
 	}
 
 	/**
-	 * Get String value from the provided column from data_source_info table.
-	 *
+	 * Get String value from the provided column from data_source_info table. 
+	 * 
 	 * @param datasource The datasource
-	 * @param columnName The column from which the data should be returned
-	 *
-	 * @return String value from the column
-	 *
-	 * @throws TskCoreException
+	 * @param columnName The column from which the data should be returned 
+	 * @return String value from the column 
+	 * @throws TskCoreException 
 	 */
 	String getDataSourceInfoString(DataSource datasource, String columnName) throws TskCoreException {
 		long id = datasource.getId();
@@ -10701,14 +10648,13 @@ String getDataSourceInfoString(DataSource datasource, String columnName) throws
 		}
 	}
 
+
 	/**
 	 * Get Long value from the provided column from data_source_info table.
 	 *
 	 * @param datasource The datasource
 	 * @param columnName The column from which the data should be returned
-	 *
 	 * @return Long value from the column
-	 *
 	 * @throws TskCoreException
 	 */
 	Long getDataSourceInfoLong(DataSource datasource, String columnName) throws TskCoreException {
@@ -11096,7 +11042,7 @@ public TagName addOrUpdateTagName(String displayName, String description, TagNam
 			resultSet.next();
 
 			return new TagName(resultSet.getLong("tag_name_id"), displayName, description, color, knownStatus, resultSet.getLong("tag_set_id"), resultSet.getInt("rank"));
-
+			
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error adding row for " + displayName + " tag name to tag_names table", ex);
 		} finally {
@@ -12400,6 +12346,7 @@ private enum PREPARED_STATEMENT {
 		SELECT_FILE_BY_ID("SELECT * FROM tsk_files WHERE obj_id = ? LIMIT 1"), //NON-NLS
 		SELECT_ARTIFACT_BY_ARTIFACT_OBJ_ID("SELECT * FROM blackboard_artifacts WHERE artifact_obj_id = ? LIMIT 1"),
 		SELECT_ARTIFACT_TYPE_BY_ARTIFACT_OBJ_ID("SELECT artifact_type_id FROM blackboard_artifacts WHERE artifact_obj_id = ? LIMIT 1"),
+		
 		SELECT_ARTIFACT_BY_ARTIFACT_ID("SELECT * FROM blackboard_artifacts WHERE artifact_id = ? LIMIT 1"),
 		INSERT_ARTIFACT("INSERT INTO blackboard_artifacts (artifact_id, obj_id, artifact_obj_id, data_source_obj_id, artifact_type_id, review_status_id) " //NON-NLS
 				+ "VALUES (?, ?, ?, ?, ?," + BlackboardArtifact.ReviewStatus.UNDECIDED.getID() + ")"), //NON-NLS
@@ -12568,7 +12515,7 @@ private enum PREPARED_STATEMENT {
 				+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"),
 		SELECT_TAG_NAME_BY_ID("SELECT * FROM tag_names where tag_name_id = ?"),
 		SELECT_TAG_NAME_BY_NAME("SELECT * FROM tag_names where display_name = ?");
-
+		
 		private final String sql;
 
 		private PREPARED_STATEMENT(String sql) {
@@ -12759,15 +12706,14 @@ public void execute() throws SQLException {
 		}
 
 		/**
-		 * Obtains a write lock on tsk_aggregate_score table. Only PostgreSQL is
-		 * supported.
-		 *
-		 * NOTE: We run into deadlock risks when we start to lock multiple
-		 * tables. If that need arrises, consider changing to opportunistic
-		 * locking and single-step transactions.
-		 */
+		 * Obtains a write lock on tsk_aggregate_score table. 
+		 * Only PostgreSQL is supported. 
+		 * 
+		 * NOTE: We run into deadlock risks when we start to lock 
+		 * multiple tables. If that need arises, consider changing 
+		 * to opportunistic locking and single-step transactions. 
+		 */		
 		private class AggregateScoreTablePostgreSQLWriteLock implements DbCommand {
-
 			private final Connection connection;
 
 			AggregateScoreTablePostgreSQLWriteLock(Connection connection) {
@@ -12778,7 +12724,7 @@ private class AggregateScoreTablePostgreSQLWriteLock implements DbCommand {
 			public void execute() throws SQLException {
 				PreparedStatement preparedStatement = connection.prepareStatement("LOCK TABLE ONLY tsk_aggregate_score in SHARE ROW EXCLUSIVE MODE");
 				preparedStatement.execute();
-
+		
 			}
 		}
 
@@ -12945,17 +12891,17 @@ PreparedStatement getPreparedStatement(PREPARED_STATEMENT statementKey, int gene
 			}
 			return statement;
 		}
-
+		
 		/**
-		 * Get a prepared statement for the given input. Will cache the prepared
-		 * statement for this connection.
-		 *
-		 * @param sqlStatement The SQL for the prepared statement.
-		 * @param generateKeys The generate keys enum from Statement.
-		 *
+		 * Get a prepared statement for the given input.
+		 * Will cache the prepared statement for this connection.
+		 * 
+		 * @param sqlStatement  The SQL for the prepared statement.
+		 * @param generateKeys  The generate keys enum from Statement.
+		 * 
 		 * @return The prepared statement
-		 *
-		 * @throws SQLException
+		 * 
+		 * @throws SQLException 
 		 */
 		PreparedStatement getPreparedStatement(String sqlStatement, int generateKeys) throws SQLException {
 			PreparedStatement statement;
@@ -13026,14 +12972,14 @@ void rollbackTransactionWithThrow() throws SQLException {
 				connection.setAutoCommit(true);
 			}
 		}
-
+		
 		/**
 		 * Blocks until a write lock can be obtained on the tsk_aggregate_score
-		 * table. Used to ensure only one thread/client is updating the score at
-		 * a time. Can be called multiple times on the same transaction.
-		 *
+		 * table. Used to ensure only one thread/client is updating the score
+		 * at a time.  Can be called multiple times on the same transaction.
+		 * 
 		 * @throws SQLException
-		 * @throws TskCoreException
+		 * @throws TskCoreException 
 		 */
 		void getAggregateScoreTableWriteLock() throws SQLException, TskCoreException {
 			switch (getDatabaseType()) {
@@ -13091,10 +13037,10 @@ void executeUpdate(PreparedStatement statement) throws SQLException {
 		@Override
 		public void close() {
 			try {
-				for (PreparedStatement stmt : preparedStatements.values()) {
+				for (PreparedStatement stmt:preparedStatements.values()) {
 					closeStatement(stmt);
 				}
-				for (PreparedStatement stmt : adHocPreparedStatements.values()) {
+				for (PreparedStatement stmt:adHocPreparedStatements.values()) {
 					closeStatement(stmt);
 				}
 				connection.close();
@@ -13211,12 +13157,12 @@ void executeCommand(DbCommand command) throws SQLException {
 	 * Transaction interface because that sort of flexibility and its associated
 	 * complexity is not needed. Also, TskCoreExceptions are thrown to be
 	 * consistent with the outer SleuthkitCase class.
-	 *
-	 * This class will automatically acquire the single user case write lock and
-	 * release it when the transaction is closed. Otherwise we risk deadlock
+	 * 
+	 * This class will automatically acquire the single user case write lock
+	 * and release it when the transaction is closed. Otherwise we risk deadlock 
 	 * because this transaction can lock up SQLite and make it "busy" and
-	 * another thread may get a write lock to the DB, but not be able to do
-	 * anything because the DB is busy.
+	 * another thread may get a write lock to the DB, but not
+	 * be able to do anything because the DB is busy.
 	 */
 	public static final class CaseDbTransaction {
 
@@ -13226,31 +13172,28 @@ public static final class CaseDbTransaction {
 		// A collection of object score changes that ocuured as part of this transaction.
 		// When the transaction is committed, events are fired to notify any listeners.
 		// Score changes are stored as a map keyed by objId to prevent duplicates.
-		private Map<Long, ScoreChange> scoreChangeMap = new HashMap<>();
+		private Map<Long, ScoreChange> scoreChangeMap = new HashMap<>(); 
 		private List<Host> hostsAdded = new ArrayList<>();
 		private List<OsAccount> accountsChanged = new ArrayList<>();
 		private List<OsAccount> accountsAdded = new ArrayList<>();
 		private List<Long> deletedOsAccountObjectIds = new ArrayList<>();
 		private List<Long> deletedResultObjectIds = new ArrayList<>();
-
+		
 		private static Set<Long> threadsWithOpenTransaction = new HashSet<>();
 		private static final Object threadsWithOpenTransactionLock = new Object();
-
-		private CaseDbTransaction(SleuthkitCase sleuthkitCase) throws TskCoreException {
+		
+		private CaseDbTransaction(SleuthkitCase sleuthkitCase, CaseDbConnection connection) throws TskCoreException {
+			this.connection = connection;
 			this.sleuthkitCase = sleuthkitCase;
-			
-			sleuthkitCase.acquireSingleUserCaseWriteLock();
-			this.connection = sleuthkitCase.getConnection();
 			try {
 				synchronized (threadsWithOpenTransactionLock) {
 					this.connection.beginTransaction();
 					threadsWithOpenTransaction.add(Thread.currentThread().getId());
 				}
 			} catch (SQLException ex) {
-				sleuthkitCase.releaseSingleUserCaseWriteLock();
 				throw new TskCoreException("Failed to create transaction on case database", ex);
 			}
-			
+			sleuthkitCase.acquireSingleUserCaseWriteLock();
 		}
 
 		/**
@@ -13264,9 +13207,10 @@ CaseDbConnection getConnection() {
 			return this.connection;
 		}
 
+		
 		/**
 		 * Saves a score change done as part of the transaction.
-		 *
+		 * 
 		 * @param scoreChange Score change.
 		 */
 		void registerScoreChange(ScoreChange scoreChange) {
@@ -13275,70 +13219,62 @@ void registerScoreChange(ScoreChange scoreChange) {
 
 		/**
 		 * Saves a host that has been added as a part of this transaction.
-		 *
 		 * @param host The host.
 		 */
 		void registerAddedHost(Host host) {
 			if (host != null) {
-				this.hostsAdded.add(host);
+				this.hostsAdded.add(host);	
 			}
 		}
-
+		
 		/**
 		 * Saves an account that has been updated as a part of this transaction.
-		 *
 		 * @param account The account.
-		 */
+		 */		
 		void registerChangedOsAccount(OsAccount account) {
 			if (account != null) {
 				accountsChanged.add(account);
 			}
 		}
-
+		
 		/**
 		 * Saves an account that has been deleted as a part of this transaction.
-		 *
 		 * @param osAccountObjId The account.
-		 */
+		 */		
 		void registerDeletedOsAccount(long osAccountObjId) {
 			deletedOsAccountObjectIds.add(osAccountObjId);
-		}
-
+		}		
+		
 		/**
 		 * Saves an account that has been added as a part of this transaction.
-		 *
 		 * @param account The account.
-		 */
+		 */	
 		void registerAddedOsAccount(OsAccount account) {
 			if (account != null) {
 				accountsAdded.add(account);
 			}
 		}
-
+		
 		/**
-		 * Saves an analysis result that has been deleted as a part of this
-		 * transaction.
-		 *
+		 * Saves an analysis result that has been deleted as a part of this transaction.
+		 * 
 		 * @param result Deleted result.
 		 */
 		void registerDeletedAnalysisResult(long analysisResultObjId) {
 			this.deletedResultObjectIds.add(analysisResultObjId);
 		}
-
 		/**
 		 * Check if the given thread has an open transaction.
-		 *
+		 * 
 		 * @param threadId Thread id to check for.
-		 *
-		 * @return True if the given thread has an open transaction, false
-		 *         otherwise.
+		 * 
+		 * @return True if the given thread has an open transaction, false otherwise.  
 		 */
 		private static boolean hasOpenTransaction(long threadId) {
 			synchronized (threadsWithOpenTransactionLock) {
 				return threadsWithOpenTransaction.contains(threadId);
 			}
 		}
-
 		/**
 		 * Commits the transaction on the case database that was begun when this
 		 * object was constructed.
@@ -13352,7 +13288,7 @@ public void commit() throws TskCoreException {
 				throw new TskCoreException("Failed to commit transaction on case database", ex);
 			} finally {
 				close();
-
+				
 				if (!scoreChangeMap.isEmpty()) {
 					// Group the score changes by data source id
 					Map<Long, List<ScoreChange>> changesByDataSource = scoreChangeMap.values().stream()
@@ -13363,7 +13299,7 @@ public void commit() throws TskCoreException {
 						sleuthkitCase.fireTSKEvent(new AggregateScoresChangedEvent(entry.getKey(), ImmutableSet.copyOf(entry.getValue())));
 					}
 				}
-
+				
 				// Fire events for any new or changed objects
 				if (!hostsAdded.isEmpty()) {
 					sleuthkitCase.fireTSKEvent(new TskEvent.HostsAddedTskEvent(hostsAdded));
@@ -14033,7 +13969,7 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 				isFile, parentFile, rederiveDetails, toolName, toolVersion,
 				otherDetails, TskData.EncodingType.NONE);
 	}
-
+	
 	/**
 	 * Adds a local/logical file to the case database. The database operations
 	 * are done within a caller-managed transaction; the caller is responsible
@@ -14060,7 +13996,7 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 	 *
 	 * @throws TskCoreException if there is an error completing a case database
 	 *                          operation.
-	 *
+	 * 
 	 * @deprecated Use the newer version with explicit sha256 parameter
 	 */
 	@Deprecated
@@ -14068,8 +14004,8 @@ public LocalFile addLocalFile(String fileName, String localPath,
 			long size, long ctime, long crtime, long atime, long mtime,
 			String md5, FileKnown known, String mimeType,
 			boolean isFile, TskData.EncodingType encodingType,
-			Content parent, CaseDbTransaction transaction) throws TskCoreException {
-
+			Content parent, CaseDbTransaction transaction) throws TskCoreException {	
+		
 		return addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime,
 				md5, null, known, mimeType, isFile, encodingType,
 				parent, transaction);
diff --git a/bindings/java/test/org/sleuthkit/datamodel/ArtifactTest.java b/bindings/java/test/org/sleuthkit/datamodel/ArtifactTest.java
index 6b0d5174d2a06846f2de1286432d2bcbb7f89a5f..26c3fa00380b3f255f474781ad11a56c6d04e2cd 100644
--- a/bindings/java/test/org/sleuthkit/datamodel/ArtifactTest.java
+++ b/bindings/java/test/org/sleuthkit/datamodel/ArtifactTest.java
@@ -242,7 +242,7 @@ public void artifactTests() throws TskCoreException, Blackboard.BlackboardExcept
 		
 		
 		// Test: add a new data artifact to the file
-		DataArtifact dataArtifact1 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_SEARCH), Collections.emptyList(), osAccount1.getId());
+		DataArtifact dataArtifact1 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_SEARCH), Collections.emptyList(), osAccount1);
         
 		OsAccountManager osAcctMgr = caseDB.getOsAccountManager();
 		
@@ -251,14 +251,14 @@ public void artifactTests() throws TskCoreException, Blackboard.BlackboardExcept
 		
 		
 		// Test: add a second data artifact to file - associate it with a different account
-		DataArtifact dataArtifact2 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CLIPBOARD_CONTENT), Collections.emptyList(), osAccount2.getId());
+		DataArtifact dataArtifact2 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CLIPBOARD_CONTENT), Collections.emptyList(), osAccount2);
 		assertTrue(dataArtifact2.getOsAccountObjectId().isPresent());
 		assertTrue(osAcctMgr.getOsAccountByObjectId(dataArtifact2.getOsAccountObjectId().get()).getAddr().orElse("").equalsIgnoreCase(ownerUid2));
 				
 				
 		// and two more 
-		DataArtifact dataArtifact3 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA), Collections.emptyList(), osAccount2.getId());
-		DataArtifact dataArtifact4 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA), Collections.emptyList(), osAccount2.getId());
+		DataArtifact dataArtifact3 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA), Collections.emptyList(), osAccount2);
+		DataArtifact dataArtifact4 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA), Collections.emptyList(), osAccount2);
 
 		
 		// TEST: get all TSK_GPS_SEARCH data artifacts in the data source
@@ -355,10 +355,10 @@ public void artifactTests() throws TskCoreException, Blackboard.BlackboardExcept
 
 		// Create five data artifacts. Only three should create a row in tsk_data_artifacts.
 		DataArtifact dataArt1 = defTextFile.newDataArtifact(dataArtType, java.util.Collections.emptyList(), null);
-		DataArtifact dataArt2 = defTextFile.newDataArtifact(dataArtType, java.util.Collections.emptyList(), osAccount2.getId());
+		DataArtifact dataArt2 = defTextFile.newDataArtifact(dataArtType, java.util.Collections.emptyList(), osAccount2);
 		BlackboardArtifact bbArt1 = defTextFile.newArtifact(dataArtType.getTypeID());
-		DataArtifact dataArt3 = defTextFile.newDataArtifact(dataArtType, java.util.Collections.emptyList(), osAccount2.getId());
-		DataArtifact dataArt4 = caseDB.getBlackboard().newDataArtifact(dataArtType, defTextFile.getId(), fs.getDataSource().getId(), java.util.Collections.emptyList(), osAccount2.getId());
+		DataArtifact dataArt3 = defTextFile.newDataArtifact(dataArtType, java.util.Collections.emptyList(), osAccount2);
+		DataArtifact dataArt4 = caseDB.getBlackboard().newDataArtifact(dataArtType, defTextFile.getId(), fs.getDataSource().getId(), java.util.Collections.emptyList(), osAccount2);
 		int dataArtifactCount = 5;
 		
 		// TEST: getDataArtifacts(artifact type id)
diff --git a/tsk/auto/tsk_db.h b/tsk/auto/tsk_db.h
index 3724638dc68004ce34925615ff9667245e6f60e9..0976f70e8be16488527d02bc240fe50e9bdf54d0 100755
--- a/tsk/auto/tsk_db.h
+++ b/tsk/auto/tsk_db.h
@@ -28,10 +28,10 @@ using std::vector;
 using std::string;
 
 /**
-* Do not change these values - the schema is no longer being updated in this code and does not match the current version in SleuthkitCase.java.
-*/
+ * Keep these values in sync with CURRENT_DB_SCHEMA_VERSION in SleuthkitCase.java
+ */
 #define TSK_SCHEMA_VER 8
-#define TSK_SCHEMA_MINOR_VER 4
+#define TSK_SCHEMA_MINOR_VER 6
 
 /**
  * Values for the type column in the tsk_objects table.