diff --git a/Makefile.am b/Makefile.am
index 0887b7ab02946dc55e47421c91f94c5ca9d49b64..b82318567563d11ac532d69831ff5783753dfbad 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -1,7 +1,7 @@
 # File that we want to include in the dist
 EXTRA_DIST = README_win32.txt README.md INSTALL.txt ChangeLog.txt NEWS.txt API-CHANGES.txt \
-    licenses/GNU-COPYING licenses/IBM-LICENSE \
-    licenses/cpl1.0.txt licenses/bsd.txt licenses/mit.txt \
+    licenses/README.md licenses/GNUv2-COPYING licenses/GNUv3-COPYING licenses/IBM-LICENSE \
+    licenses/Apache-LICENSE-2.0.txt licenses/cpl1.0.txt licenses/bsd.txt licenses/mit.txt \
     m4/*.m4 \
     docs/README.txt \
     packages/sleuthkit.spec \
@@ -59,7 +59,8 @@ nobase_include_HEADERS = tsk/libtsk.h tsk/tsk_incs.h \
     tsk/auto/tsk_is_image_supported.h tsk/auto/guid.h \
     tsk/pool/tsk_pool.h tsk/pool/tsk_pool.hpp tsk/pool/tsk_apfs.h tsk/pool/tsk_apfs.hpp \
 	tsk/pool/pool_compat.hpp tsk/pool/apfs_pool_compat.hpp \
-    tsk/util/crypto.hpp tsk/util/lw_shared_ptr.hpp tsk/util/span.hpp
+    tsk/util/crypto.hpp tsk/util/lw_shared_ptr.hpp tsk/util/span.hpp \
+    tsk/util/detect_encryption.h
 
 nobase_dist_data_DATA = tsk/sorter/default.sort tsk/sorter/freebsd.sort \
     tsk/sorter/images.sort tsk/sorter/linux.sort tsk/sorter/openbsd.sort \
diff --git a/NEWS.txt b/NEWS.txt
index a69e401d31037aee4cbbe5ce2da9d86767e29b32..e6ad6237e2015f7f5a733786f9a824137ee6871c 100644
--- a/NEWS.txt
+++ b/NEWS.txt
@@ -1,3 +1,24 @@
+---------------- VERSION 4.11.0 --------------
+C/C++:
+- Added checks at various layers to detect encrypted file systems and disks to give more useful error messages.
+- Added checks to detect file formats that are not supported (such as AD1, ZIP, etc.) to give more useful error messages.
+- Added tsk_imageinfo tool that detects if an image is supported by TSK and if it is encrypted.
+- Added numerous bounds checks from Joachim Metz.
+- Clarified licenses as pointed out by Joachim Metz.
+
+Java:
+- Updated from Schema 8.6 to 9.1.
+- Added tables and classes for OS Accounts and Realms (Domains).
+- Added tables and classes for Host Addresses (IP, MAC, etc.).
+- Added tables and classes for Analysis Results vs Data Artifacts by adding onto BlackboardArtifacts.
+- Added tables and classes for Host and Person to make it easier to group data sources.
+- Added static types for standard artifact types.
+- Added File Attribute table to allow custom information to be stored for each file.
+- Made the ordering of acquiring the lock and the database connection consistent.
+- Made the findFile methods more efficient by using extension (which is indexed).
+
+
+
 ---------------- VERSION 4.10.2 --------------
 C/C++
 - Added support for Ext4 inline data
diff --git a/bindings/java/build.xml b/bindings/java/build.xml
index ba275cb92ee9df47de362e01a4255f0ec6a935dd..ab734e61205edcd5a75998146471f1590c647568 100644
--- a/bindings/java/build.xml
+++ b/bindings/java/build.xml
@@ -11,7 +11,7 @@
 	<import file="build-${os.family}.xml"/>
 
     <!-- Careful changing this because release-windows.pl updates it by pattern -->
-<property name="VERSION" value="4.10.2"/>
+<property name="VERSION" value="4.11.0"/>
 
 	<!-- set global properties for this build -->
 	<property name="default-jar-location" location="/usr/share/java"/>
diff --git a/bindings/java/doxygen/Doxyfile b/bindings/java/doxygen/Doxyfile
index 380ba0c78840caebf74e4e9a3c7d30d5ed1b40ae..b20fc027b6c600a56253ada19199495d3e51d26f 100644
--- a/bindings/java/doxygen/Doxyfile
+++ b/bindings/java/doxygen/Doxyfile
@@ -39,7 +39,7 @@ PROJECT_NAME           = "Sleuth Kit Java Bindings (JNI)"
 # control system is used.
 
 # NOTE: This is updated by the release-unix.pl script
-PROJECT_NUMBER = 4.10.2
+PROJECT_NUMBER = 4.11.0
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer a
@@ -765,7 +765,13 @@ INPUT                  = main.dox \
                          artifact_catalog.dox \
                          insert_and_update_database.dox \
                          communications.dox \
-						 ../src
+                         datasources.dox \
+                         os_accounts.dox \
+                         schema/schema_list.dox \
+                         schema/db_schema_8_6.dox \
+                         schema/db_schema_9_0.dox \
+                         schema/db_schema_9_1.dox \
+                         ../src
 
 # This tag can be used to specify the character encoding of the source files
 # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
@@ -1050,7 +1056,7 @@ GENERATE_HTML          = YES
 # This tag requires that the tag GENERATE_HTML is set to YES.
 
 # NOTE: This is updated by the release-unix.pl script
-HTML_OUTPUT = jni-docs/4.10.2/
+HTML_OUTPUT = jni-docs/4.11.0/
 
 # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
 # generated HTML page (for example: .htm, .php, .asp).
diff --git a/bindings/java/doxygen/artifact_catalog.dox b/bindings/java/doxygen/artifact_catalog.dox
index 39d548bf7df9162b084a5f410f53da9312721e10..41a666328a03aa16346449faac896f98e8acee7d 100644
--- a/bindings/java/doxygen/artifact_catalog.dox
+++ b/bindings/java/doxygen/artifact_catalog.dox
@@ -4,7 +4,9 @@
 # Introduction
 This document reflects current standard usage of artifact and attribute types for posting analysis results to the case blackboard in Autopsy.  Refer to \ref mod_bbpage for more background on the blackboard and how to make artifacts. 
 
-The catalog section below has one entry for each standard artifact type. Each entry lists the required and optional attributes of artifacts of the type.
+The catalog section below has one entry for each standard artifact type, organized by category. Each entry lists the required and optional attributes of artifacts of that type. The categories are:
+- \ref art_catalog_analysis "Analysis Result": Result from an analysis technique on a given object with a given configuration.  Includes Conclusion, Relevance Score, and Confidence.
+- \ref art_catalog_data "Data Artifact": Data that was originally embedded by an application/OS in a file or other data container.
 
 NOTE:
 - While we have listed some attributes as "Required", nothing will enforce that they exist. Modules that use artifacts from the blackboard should assume that some of the attributes may not actually exist. 
@@ -15,9 +17,166 @@ For the full list of types, refer to:
 - org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE
 
 
-<h1>Artifacts Catalog</h1>
+\section art_catalog_analysis Analysis Result Types
 
-In alphabetical order.
+---
+## TSK_DATA_SOURCE_USAGE
+Describes how a data source was used, e.g., as a SIM card or an OS drive (such as for Windows or Android).
+
+### REQUIRED ATTRIBUTES
+- TSK_DESCRIPTION (Description of the usage, e.g., "OS Drive (Windows Vista)").
+
+---
+## TSK_ENCRYPTION_DETECTED
+An indication that the content is encrypted.
+
+### REQUIRED ATTRIBUTES
+- TSK_COMMENT (A comment on the encryption, e.g., encryption type or password)
+
+---
+## TSK_ENCRYPTION_SUSPECTED
+An indication that the content is likely encrypted.
+
+### REQUIRED ATTRIBUTES
+- TSK_COMMENT (Reason for suspecting encryption)
+
+---
+## TSK_EXT_MISMATCH_DETECTED
+An indication that the registered extensions for a file's mime type do not match the file's extension.
+
+### REQUIRED ATTRIBUTES
+None
+
+---
+## TSK_FACE_DETECTED
+An indication that a human face was detected in some content.
+
+### REQUIRED ATTRIBUTES
+None
+
+---
+## TSK_HASHSET_HIT
+Indicates that the MD5 hash of a file matches a set of known MD5s (possibly user defined).
+
+### REQUIRED ATTRIBUTES
+- TSK_SET_NAME (Name of hashset containing the file's MD5)
+
+### OPTIONAL ATTRIBUTES
+- TSK_COMMENT (Additional comments about the hit)
+
+---
+## TSK_INTERESTING_ARTIFACT_HIT
+Indicates that the source artifact matches some set of criteria which deem it interesting. Artifacts with this meta artifact will be brought to the attention of the user.
+
+### REQUIRED ATTRIBUTES
+- TSK_ASSOCIATED_ARTIFACT (The source artifact)
+- TSK_SET_NAME (The name of the set of criteria which deemed this artifact interesting)
+
+### OPTIONAL ATTRIBUTES
+- TSK_COMMENT (Comment on the reason that the source artifact is interesting)
+- TSK_CATEGORY (The set membership rule that was satisfied)
+
+---
+## TSK_INTERESTING_FILE_HIT
+Indication that the source file matches some set of criteria (possibly user defined) which deem it interesting. Files with this artifact will be brought to the attention of the user.
+
+### REQUIRED ATTRIBUTES
+- TSK_SET_NAME (The name of the set of criteria which deemed this file interesting)
+
+### OPTIONAL ATTRIBUTES
+- TSK_COMMENT (Comment on the reason that the source artifact is interesting)
+- TSK_CATEGORY (The set membership rule that was satisfied, e.g., a particular MIME type)
+
+---
+## TSK_KEYWORD_HIT
+Indication that the source artifact or file contains a keyword. Keywords are grouped into named sets.
+
+### REQUIRED ATTRIBUTES
+- TSK_KEYWORD (Keyword that was found in the artifact or file)
+- TSK_KEYWORD_SEARCH_TYPE (Specifies the type of match, e.g., an exact match, a substring match, or a regex match)
+- TSK_SET_NAME (The set name that the keyword was contained in)
+- TSK_KEYWORD_REGEXP (The regular expression that matched, only required for regex matches)
+- TSK_ASSOCIATED_ARTIFACT (Only required if the keyword hit source is an artifact)
+
+### OPTIONAL ATTRIBUTES
+- TSK_KEYWORD_PREVIEW (Snippet of text around keyword)
+
+---
+## TSK_OBJECT_DETECTED
+Indicates that an object was detected in a media file. Typically used by computer vision software to classify images.
+
+### REQUIRED ATTRIBUTES
+- TSK_COMMENT (What was detected)
+
+### OPTIONAL ATTRIBUTES
+- TSK_DESCRIPTION (Additional comments about the object or observer, e.g., what detected the object)
+
+---
+## TSK_USER_CONTENT_SUSPECTED
+An indication that some media file content was generated by the user.
+
+### REQUIRED ATTRIBUTES
+- TSK_COMMENT (The reason why user-generated content is suspected)
+
+---
+## TSK_VERIFICATION_FAILED
+An indication that some data did not pass verification. One example would be verifying a SHA-1 hash.
+
+### REQUIRED ATTRIBUTES
+- TSK_COMMENT (Reason for failure, what failed)
+
+---
+## TSK_WEB_ACCOUNT_TYPE
+A web account type entry. 
+
+### REQUIRED ATTRIBUTES
+- TSK_DOMAIN (Domain of the URL)
+- TSK_TEXT (Indicates type of account (admin/moderator/user) and possible platform)
+- TSK_URL (URL indicating the user has an account on this domain)
+
+---
+## TSK_WEB_CATEGORIZATION
+The categorization of a web host using a specific usage type, e.g. mail.google.com would correspond to Web Email.
+
+### REQUIRED ATTRIBUTES
+- TSK_NAME (The usage category identifier, e.g. Web Email)
+- TSK_DOMAIN (The domain of the host, e.g. google.com)
+- TSK_HOST (The full host, e.g. mail.google.com)
+
+---
+## TSK_YARA_HIT
+Indicates that some content of the file matched a YARA rule.
+
+### REQUIRED ATTRIBUTES
+- TSK_RULE (The rule that was a hit for this file)
+- TSK_SET_NAME (Name of the rule set containing the matching YARA rule)
+
+---
+## TSK_METADATA_EXIF
+EXIF metadata found in an image or audio file.
+
+### REQUIRED ATTRIBUTES
+- At least one of:
+- TSK_DATETIME_CREATED (Creation date of the file, in seconds since 1970-01-01T00:00:00Z)
+- TSK_DEVICE_MAKE (Device make, generally the manufacturer, e.g., Apple)
+- TSK_DEVICE_MODEL (Device model, generally the product, e.g., iPhone)
+- TSK_GEO_ALTITUDE (The camera's altitude when the image/audio was taken)
+- TSK_GEO_LATITUDE (The camera's latitude when the image/audio was taken)
+- TSK_GEO_LONGITUDE (The camera's longitude when the image/audio was taken)
+
+<br><br>
+
+\section art_catalog_data Data Artifact Types
 
 ---
 ## TSK_ACCOUNT
@@ -48,11 +207,9 @@ Details about System/aplication/file backups.
 
 ### REQUIRED ATTRIBUTES
 - TSK_DATETIME_START (Date/Time the backup happened)
-     or 
-  TSK_DATETIME
   
 ### OPTIONAL ATTRIBUTES
-- TSK_DATETIME_ENDED (Date/Time the backup ended)
+- TSK_DATETIME_END (Date/Time the backup ended)
 
 
 
@@ -62,7 +219,9 @@ Details about a Bluetooth adapter.
 
 ### REQUIRED ATTRIBUTES
 - TSK_MAC_ADDRESS (MAC address of the Bluetooth adapter)
-
+- TSK_NAME (Name of the device)
+- TSK_DATETIME (Time device was last seen)
+- TSK_DEVICE_ID (UUID of the device)
 
 
 ---
@@ -75,7 +234,8 @@ Details about a Bluetooth pairing event.
 ### OPTIONAL ATTRIBUTES
 - TSK_DATETIME (When the pairing occurred, in seconds since 1970-01-01T00:00:00Z)
 - TSK_MAC_ADDRESS (MAC address of the Bluetooth device)
-
+- TSK_DEVICE_ID (UUID of the device)
+- TSK_DATETIME_ACCESSED (Time of the last connection)
 
 
 ---
@@ -85,9 +245,9 @@ A calendar entry in an application file or database.
 ### REQUIRED ATTRIBUTES
 - TSK_CALENDAR_ENTRY_TYPE (E.g., Reminder, Event, Birthday, etc.)
 - TSK_DATETIME_START (Start of the entry, in seconds since 1970-01-01T00:00:00Z)
-- TSK_DESCRIPTION (Description of the entry, such as a note)
 
 ### OPTIONAL ATTRIBUTES
+- TSK_DESCRIPTION (Description of the entry, such as a note)
 - TSK_LOCATION (Location of the entry, such as an address)
 - TSK_DATETIME_END (End of the entry, in seconds since 1970-01-01T00:00:00Z)
 
@@ -141,22 +301,13 @@ A contact book entry in an application file or database.
 
 
 
----
-## TSK_DATA_SOURCE_USAGE
-Describes how a data source was used, e.g., as a SIM card or an OS drive (such as for Windows or Android).
-
-### REQUIRED ATTRIBUTES
-- TSK_DESCRIPTION (Description of the usage, e.g., "OS Drive (Windows Vista)").
-
 
 ---
 ## TSK_DELETED_PROG
 Programs that have been deleted from the system.
 
 ### REQUIRED ATTRIBUTES
-- TSK_DATETIME_DELETED (Date/Time the program was deleted)
-     or 
-  TSK_DATETIME
+- TSK_DATETIME (Date/Time the program was deleted)
 - TSK_PROG_NAME (Program that was deleted)
 
 ### OPTIONAL Attributes
@@ -213,52 +364,12 @@ An email message found in an application file or database.
 - TSK_SUBJECT (Subject of the email message)
 - TSK_THREAD_ID (ID specified by the analysis module to group emails into threads for display purposes)
 
-
-
----
-## TSK_ENCRYPTION_DETECTED
-An indication that the content is encrypted.
-
-### REQUIRED ATTRIBUTES
-- TSK_COMMENT (A comment on the encryption, e.g., encryption type or password)
-
-
-
----
-## TSK_ENCRYPTION_SUSPECTED
-An indication that the content is likely encrypted.
-
-### REQUIRED ATTRIBUTES
-- TSK_COMMENT (Reason for suspecting encryption)
-
-
-
 ---
 ## TSK_EXTRACTED_TEXT
 Text extracted from some content.
 
 ### REQUIRED ATTRIBUTES
-TSK_TEXT (The extracted text)
-
-
-
----
-## TSK_EXT_MISMATCH_DETECTED
-An indication that the registered extensions for a file's mime type do not match the file's extension.
-
-### REQUIRED ATTRIBUTES
-None
-
-
-
----
-## TSK_FACE_DETECTED
-An indication that a human face was detected in some content.
-
-### REQUIRED ATTRIBUTES
-None
-
-
+- TSK_TEXT (The extracted text)
 
 ---
 ## TSK_GEN_INFO
@@ -359,18 +470,6 @@ A Global Positioning System (GPS) track artifact records the track, or path, of
 
 
 
----
-## TSK_HASHSET_HIT
-Indicates that the MD5 hash of a file matches a set of known MD5s (possibly user defined).
-
-### REQUIRED ATTRIBUTES
-- TSK_SET_NAME (Name of hashset containing the file's MD5)
-
-### OPTIONAL ATTRIBUTES
-- TSK_COMMENT (Additional comments about the hit)
-
-
-
 ---
 ## TSK_INSTALLED_PROG
 Details about an installed program. 
@@ -379,56 +478,11 @@ Details about an installed program.
 - TSK_PROG_NAME (Name of the installed program)
 
 ### OPTIONAL ATTRIBUTES
-- TSK_DATETIME (A date and time associated with the installed program, e.g., the last modified time, in seconds since 1970-01-01T00:00:00Z)
-- TSK_DATETIME_CREATED (When the program was installed, in seconds since 1970-01-01T00:00:00Z)
+- TSK_DATETIME (When the program was installed, in seconds since 1970-01-01T00:00:00Z)
 - TSK_PATH (Path to the installed program in the data source)
 - TSK_PATH_SOURCE (Path to an Android Package Kit (APK) file for an Android program)
 - TSK_PERMISSIONS (Permissions of the installed program)
-
-
-
----
-## TSK_INTERESTING_ARTIFACT_HIT
-Indicates that the source artifact matches some set of criteria which deem it interesting. Artifacts with this meta artifact will be brought to the attention of the user.
-
-### REQUIRED ATTRIBUTES
-- TSK_ASSOCIATED_ARTIFACT (The source artifact)
-- TSK_SET_NAME (The name of the set of criteria which deemed this artifact interesting)
-
-### OPTIONAL ATTRIBUTES
-- TSK_COMMENT (Comment on the reason that the source artifact is interesting)
-- TSK_CATEGORY (The set membership rule that was satisfied)
-
-
-
----
-## TSK_INTERESTING_FILE_HIT
-Indication that the source file matches some set of criteria (possibly user defined) which deem it interesting. Files with this artifact will be brought to the attention of the user.
-
-### REQUIRED ATTRIBUTES
-- TSK_SET_NAME (The name of the set of criteria which deemed this file interesting)
-
-### OPTIONAL ATTRIBUTES
-- TSK_COMMENT (Comment on the reason that the source artifact is interesting)
-- TSK_CATEGORY (The set membership rule that was satisfied. I.e. a particular mime)
-
-
-
----
-## TSK_KEYWORD_HIT
-Indication that the source artifact or file contains a keyword. Keywords are grouped into named sets.
-
-### REQUIRED ATTRIBUTES
-- TSK_KEYWORD (Keyword that was found in the artifact or file)
-- TSK_KEYWORD_SEARCH_TYPE (Specifies the type of match, e.g., an exact match, a substring match, or a regex match)
-- TSK_SET_NAME (The set name that the keyword was contained in)
-- TSK_KEYWORD_REGEXP (The regular expression that matched, only required for regex matches)
-- TSK_ASSOCIATED_ARTIFACT (Only required if the keyword hit source is an artifact)
-
-### OPTIONAL ATTRIBUTES
-- TSK_KEYWORD_PREVIEW (Snippet of text around keyword)
-
-
+- TSK_VERSION (Version number of the program)
 
 ---
 ## TSK_MESSAGE
@@ -471,60 +525,6 @@ None
 - TSK_USER_ID (Last author of the document)
 - TSK_VERSION (Version number of the program used to create the document)
 
----
-## TSK_METADATA_EXIF
-EXIF metadata found in an image or audio file.
-
-### REQUIRED ATTRIBUTES
-- At least one of:
-- TSK_DATETIME_CREATED (Creation date of the file, in seconds since 1970-01-01T00:00:00Z)
-- TSK_DEVICE_MAKE (Device make, generally the manufacturer, e.g., Apple)
-- TSK_DEVICE_MODEL (Device model, generally the product, e.g., iPhone)
-- TSK_GEO_ALTITUDE (The camera's altitude when the image/audio was taken)
-- TSK_GEO_LATITUDE (The camera's latitude when the image/audio was taken)
-- TSK_GEO_LONGITUDE (The camera's longitude when the image/audio was taken)
-
-
----
-## TSK_OBJECT_DETECTED
-Indicates that an object was detected in a media file. Typically used by computer vision software to classify images.
-
-### REQUIRED ATTRIBUTES
-- TSK_COMMENT (What was detected)
-
-### OPTIONAL ATTRIBUTES
-- TSK_DESCRIPTION (Additional comments about the object or observer, e.g., what detected the object)
-
-
-
----
-## TSK_OS_ACCOUNT
-Details about an operating system account recovered from the data source. Examples include user or administrator accounts.
-
-### REQUIRED ATTRIBUTES
-- TSK_ACCOUNT_TYPE (Account type, e.g., Administrator, User, etc.)
-- TSK_USER_NAME (The user name associated with the account)
-
-### OPTIONAL ATTRIBUTES
-- TSK_ACCOUNT_SETTINGS (Account settings such as if the account is set to auto lock or requires a home directory)
-- TSK_COUNT (Number of logins)
-- TSK_DATETIME_ACCESSED (Datetime of last login, in seconds since 1970-01-01T00:00:00Z)
-- TSK_DATETIME_CREATED (Datetime of account creation, in seconds since 1970-01-01T00:00:00Z)
-- TSK_DATETIME_PASSWORD_FAIL (Datetime of the last failed login, in seconds since 1970-01-01T00:00:00Z)
-- TSK_DATETIME_PASSWORD_RESET (Datetime of last password reset, in seconds since 1970-01-01T00:00:00Z)
-- TSK_DESCRIPTION (Description of the account, e.g., "My personal school account")
-- TSK_DISPLAY_NAME (Full name of the user associated with the account)
-- TSK_EMAIL (Email address associated with the account)
-- TSK_FLAG (Account flags such as indication that the account is a server trust account) 
-- TSK_GROUPS (Groups that this account is included in)
-- TSK_PASSWORD_HINT (The password hint description)
-- TSK_PASSWORD_SETTINGS (Password settings such as if the password has been set to expire or is required for login)
-- TSK_PATH (Home directory of the account. Ex: "C:/Users/John/")
-- TSK_USER_ID (User security identifier, e.g., SID)
-- TSK_NAME (Name of person associated with the account)
-
-
-
 ---
 ## TSK_OS_INFO
 Details about an operating system recovered from the data source.
@@ -547,7 +547,7 @@ Details about an operating system recovered from the data source.
 
 
 ---
-## TSK_PROG_NOTIFICATION
+## TSK_PROG_NOTIFICATIONS
 Notifications to the user.
 
 ### REQUIRED ATTRIBUTES
@@ -566,9 +566,9 @@ The number of times a program/application was run.
 
 ### REQUIRED ATTRIBUTES
 - TSK_PROG_NAME (Name of the application)
-- TSK_COUNT (Number of times program was run, should be at least 1)
 
 ### OPTIONAL ATTRIBUTES
+- TSK_COUNT (Number of times program was run, should be at least 1)
 - TSK_DATETIME (Timestamp that application was run last, in seconds since 1970-01-01T00:00:00Z)
 - TSK_BYTES_SENT (Number of bytes sent)
 - TSK_BYTES_RECEIVED (Number of bytes received)
@@ -676,26 +676,12 @@ An event in the timeline of a case.
 - TSK_DATETIME (When the event occurred, in seconds since 1970-01-01T00:00:00Z)
 - TSK_DESCRIPTION (A description of the event)
 
-
-
----
-## TSK_USER_CONTENT_SUSPECTED
-An indication that some media file content was generated by the user.
-
-### REQUIRED ATTRIBUTES
-- TSK_COMMENT (The reason why user-generated content is suspected)
-
-
 ---
-
 ## TSK_USER_DEVICE_EVENT
-Activity on the system or from an application.  Example usage is a mobile device being locked and unlocked.
-. 
+Activity on the system or from an application.  Example usage is a mobile device being locked and unlocked. 
 
 ### REQUIRED ATTRIBUTES
 - TSK_DATETIME_START (When activity started)
-    or 
-  TSK_DATETIME
 
 ### OPTIONAL ATTRIBUTES
 - TSK_ACTIVITY_TYPE (Activity type i.e.: On or Off)
@@ -703,27 +689,6 @@ Activity on the system or from an application.  Example usage is a mobile device
 - TSK_PROG_NAME (Name of the program doing the activity)
 - TSK_VALUE (Connection type)
 
-
----
-## TSK_VERIFICATION_FAILED
-An indication that some data did not pass verification. One example would be verifying a SHA-1 hash.
-
-### REQUIRED ATTRIBUTES
-- TSK_COMMENT (Reason for failure, what failed)
-
-
-
----
-## TSK_WEB_ACCOUNT_TYPE
-A web account type entry. 
-
-### REQUIRED ATTRIBUTES
-- TSK_DOMAIN (Domain of the URL)
-- TSK_TEXT (Indicates type of account (admin/moderator/user) and possible platform)
-- TSK_URL (URL indicating the user has an account on this domain)
-
-
-
 ---
 ## TSK_WEB_BOOKMARK
 A web bookmark entry.
@@ -738,8 +703,6 @@ A web bookmark entry.
 - TSK_NAME (Name of the bookmark entry)
 - TSK_TITLE (Title of the web page that was bookmarked)
 
-
-
 ---
 ## TSK_WEB_CACHE
 A web cache entry. The resource that was cached may or may not be present in the data source.
@@ -754,19 +717,6 @@ A web cache entry. The resource that was cached may or may not be present in the
 - TSK_PATH_ID (Object ID of the source cache file)
 - TSK_DOMAIN (Domain of the URL)
 
-
-
----
-## TSK_WEB_CATEGORIZATION
-The categorization of a web host using a specific usage type, e.g. mail.google.com would correspond to Web Email.
-
-### REQUIRED ATTRIBUTES
-- TSK_NAME (The usage category identifier, e.g. Web Email)
-- TSK_DOMAIN (The domain of the host, e.g. google.com)
-- TSK_HOST (The full host, e.g. mail.google.com)
-
-
-
 ---
 ## TSK_WEB_COOKIE
 A Web cookie found.
@@ -777,8 +727,8 @@ A Web cookie found.
 - TSK_VALUE (The Web cookie value attribute)
 
 ### OPTIONAL ATTRIBUTES
+- TSK_DATETIME_ACCESSED (Datetime the Web Cookie was last accessed, in seconds since 1970-01-01T00:00:00Z)
 - TSK_DATETIME_CREATED (Datetime the Web cookie was created, in seconds since 1970-01-01T00:00:00Z)
-- TSK_DATETIME_START (Datetime the Web cookie session was started, in seconds since 1970-01-01T00:00:00Z)
 - TSK_DATETIME_END (Expiration datetime of the Web cookie, in seconds since 1970-01-01T00:00:00Z)
 - TSK_DOMAIN (The domain the Web cookie serves)
 - TSK_PROG_NAME (Name of the application or application extractor that stored the Web cookie)
@@ -809,6 +759,7 @@ Contains autofill data for a person's address. Form data is usually saved by a W
 - TSK_LOCATION (The address of the person, e.g., 123 Main St.)
 
 ### OPTIONAL ATTRIBUTES
+- TSK_COMMENT (Comment if the autofill data is encrypted)
 - TSK_COUNT (Number of times the Web form data was used)
 - TSK_DATETIME_ACCESSED (Last accessed timestamp of the Web form data, in seconds since 1970-01-01T00:00:00Z)
 - TSK_DATETIME_MODIFIED (Last modified timestamp of the Web form data, in seconds since 1970-01-01T00:00:00Z)
@@ -817,7 +768,6 @@ Contains autofill data for a person's address. Form data is usually saved by a W
 - TSK_PHONE_NUMBER (Phone number from the form data)
 
 
-
 ---
 ## TSK_WEB_FORM_AUTOFILL
 Contains autofill data for a Web form. Form data is usually saved by a Web browser. Each field value pair in the form should be stored in separate artifacts.
@@ -828,10 +778,11 @@ Contains autofill data for a Web form. Form data is usually saved by a Web brows
 - TSK_VALUE (Value of the autofill field)
 
 ### OPTIONAL ATTRIBUTES
+- TSK_COMMENT (Comment if the form autofill data is encrypted)
 - TSK_COUNT (Number of times this Web form data has been used)
 - TSK_DATETIME_CREATED (Datetime this Web form autofill data was created, in seconds since 1970-01-01T00:00:00Z)
 - TSK_DATETIME_ACCESSED (Datetime this Web form data was last accessed, in seconds since 1970-01-01T00:00:00Z)
-
+- TSK_PROG_NAME (The application that stored this form information)
 
 
 ---
@@ -849,6 +800,7 @@ A Web history entry.
 - TSK_TITLE (Title of the Web page that was visited)
 - TSK_URL_DECODED (The decoded URL)
 - TSK_USER_NAME (Name of the user that viewed the Web page)
+- TSK_DATETIME_CREATED (The datetime the page was created, e.g., for offline pages)
 
 
 
@@ -876,7 +828,8 @@ Details about a WiFi network.
 ### OPTIONAL ATTRIBUTES
 - TSK_DATETIME (Timestamp, in seconds since 1970-01-01T00:00:00Z. This timestamp could be last connected time or creation time)
 - TSK_DEVICE_ID (String that uniquely identifies the WiFi network)
-
+- TSK_MAC_ADDRESS (MAC address of the adapter)
+- TSK_DEVICE_MODEL (Model of the device)
 
 
 ---
@@ -888,14 +841,4 @@ Details about a WiFi adapter.
 
 
 
----
-## TSK_YARA_HIT
-Indicates that the some content of the file was a hit for a YARA rule match.
-
-### REQUIRED ATTRIBUTES
-- TSK_RULE (The rule that was a hit for this file)
-- TSK_SET_NAME (Name of the rule set containing the matching rule YARA rule)
-
-
-
 */
diff --git a/bindings/java/doxygen/blackboard.dox b/bindings/java/doxygen/blackboard.dox
index 68c4ee1dabbbf1a5bd2980594c5c45bce2d6cc05..fc51e88b9699fcfbdcc4d6603340602e1f134876 100644
--- a/bindings/java/doxygen/blackboard.dox
+++ b/bindings/java/doxygen/blackboard.dox
@@ -22,19 +22,30 @@ The second special type of artifact is the TSK_ASSOCIATED_OBJECT. All artifacts
 
 \section jni_bb_access Accessing the Blackboard
 
-Modules can access the blackboard from either org.sleuthkit.datamodel.SleuthkitCase or a org.sleuthkit.datamodel.Content object.  The methods associated with org.sleuthkit.datamodel.Content all limit the Blackboard to a specific file.
+Modules can access the blackboard from org.sleuthkit.datamodel.SleuthkitCase, org.sleuthkit.datamodel.Blackboard, or an org.sleuthkit.datamodel.Content object.  The methods associated with org.sleuthkit.datamodel.Content all limit the Blackboard to a specific file.
 
 \subsection jni_bb_access_post Posting to the Blackboard
 
-The first thing you need to do is create the artifact.  All artifacts must be associated with a Content object.  You can do this by creating an instance of org.sleuthkit.datamodel.BlackboardArtifact by calling either:
-- org.sleuthkit.datamodel.Content.newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) on the Content object you are adding the artifact to
-- org.sleuthkit.datamodel.SleuthkitCase.newBlackboardArtifact(ARTIFACT_TYPE artifactType, long obj_id) or a variation.
+First, you need to decide what type of artifact you are making and which category it belongs to. Artifact types fall into two categories:
+<ul>
+<li>Analysis Result: Result from an analysis technique on a given object with a given configuration. Includes Conclusion, Relevance Score, and Confidence.
+<li>Data Artifact: Data that was originally embedded by an application/OS in a file or other data container.
+</ul>
+Consult the \ref artifact_catalog_page "artifact catalog" for a list of built-in types and what categories they belong to. If you are creating a data artifact, you can optionally add an OS account to it. If you are creating an analysis result, you can optionally add a score and other notes about the result.
 
-With either of these approaches, the artifact is created in the database immediately. 
+There are many ways to create artifacts, but we will focus on creating them through the Blackboard class or directly through a Content object. Regardless of how they are created, all artifacts must be associated with a Content object.
 
-If you want to create an attribute in the TSK_GEN_INFO artifact, use org.sleuthkit.datamodel.Content.getGenInfoArtifact() to ensure that you do not create a second TSK_GEN_INFO artifact for the file and to ensure that you used the cached version (which will be faster for you). 
+<ul>
+<li>org.sleuthkit.datamodel.AbstractContent.newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId)
+<li>org.sleuthkit.datamodel.AbstractContent.newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList)
+<li>org.sleuthkit.datamodel.Blackboard.newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId, Collection<BlackboardAttribute> attributes, Long osAccountId)
+<li>org.sleuthkit.datamodel.Blackboard.newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score, 
+			String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList, CaseDbTransaction transaction)
+</ul>
 
-Next, you need to make attributes and add them to the artifact.  Attributes are created by making a new instance of org.sleuthkit.datamodel.BlackboardAttribute using one of the various constructors. After you create one with the correct type and value, you add it to the artifact using org.sleuthkit.datamodel.BlackboardArtifact.addAttribute() (or org.sleuthkit.datamodel.BlackboardArtifact.addAttributes() if you have several to add - it’s faster). Note that you should not manually add attributes of type JSON for standard attribute types such as TSK_ATTACHMENTS or TSK_GEO_TRACKPOINTS. Instead, you should use the helper classes in org.sleuthkit.datamodel.blackboardutils.attributes or org.sleuthkit.datamodel.blackboardutils to create your artifacts.
+Attributes are created by making a new instance of org.sleuthkit.datamodel.BlackboardAttribute using one of the various constructors. Attributes can either be added when creating the artifact using the methods in the above list or at a later time using org.sleuthkit.datamodel.BlackboardArtifact.addAttribute() (or org.sleuthkit.datamodel.BlackboardArtifact.addAttributes() if you have several to add - it’s faster). Note that you should not manually add attributes of type JSON for standard attribute types such as TSK_ATTACHMENTS or TSK_GEO_TRACKPOINTS. Instead, you should use the helper classes in org.sleuthkit.datamodel.blackboardutils.attributes or org.sleuthkit.datamodel.blackboardutils to create your artifacts.
+
+If you want to create an attribute in the TSK_GEN_INFO artifact, use org.sleuthkit.datamodel.Content.getGenInfoArtifact() to ensure that you do not create a second TSK_GEN_INFO artifact for the file and to ensure that you used the cached version (which will be faster for you). 
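+
+As a rough sketch (not a definitive recipe), posting one analysis result and one data artifact could look like the following. The Content object "file", the String "moduleName", the attribute values, and the Score constant name are assumptions for illustration; the method signatures are the ones listed above.
+
+\code
+// Assumed to already exist: a Content object "file" and a String "moduleName".
+List<BlackboardAttribute> resultAttrs = new ArrayList<>();
+resultAttrs.add(new BlackboardAttribute(
+        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, moduleName,
+        "High entropy suggests encryption"));
+
+// Analysis result: includes a score and a justification for the conclusion.
+file.newAnalysisResult(BlackboardArtifact.Type.TSK_ENCRYPTION_SUSPECTED,
+        Score.SCORE_LIKELY_NOTABLE, null, null,
+        "Entropy was above threshold", resultAttrs);
+
+// Data artifact: no score; an OS account can optionally be passed (null here).
+List<BlackboardAttribute> progAttrs = new ArrayList<>();
+progAttrs.add(new BlackboardAttribute(
+        BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, moduleName,
+        "Example App"));
+file.newDataArtifact(BlackboardArtifact.Type.TSK_INSTALLED_PROG,
+        progAttrs, null);
+\endcode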
 
 \subsubsection jni_bb_artifact2 Creating Multiple Artifacts or Multiple Attributes
 
diff --git a/bindings/java/doxygen/datasources.dox b/bindings/java/doxygen/datasources.dox
new file mode 100644
index 0000000000000000000000000000000000000000..882596cadb2a8c0932a696bc4f3f0c7eb32aded4
--- /dev/null
+++ b/bindings/java/doxygen/datasources.dox
@@ -0,0 +1,33 @@
+/*! \page mod_dspage Data Sources, Hosts, and Persons
+  
+\section ds_overview Overview
+This page outlines some of the core concepts around data sources and how they are organized. 
+
+\section ds_ds Data Sources
+A org.sleuthkit.datamodel.DataSource represents a set of data that has been added to a case. Examples of data sources include:
+- A disk or phone image
+- A set of logical files
+- A report from another forensics tool
+
+The objects in the case database are generally organized in a tree structure. The data sources are often the set of top-most items in the tree. 
+You can call org.sleuthkit.datamodel.SleuthkitCase.getDataSources() to get all of the data sources in a case. From there you can call getChildren() to go down the tree. For example, you can go from the disk image to volumes, then to file systems, and finally to files and subfolders. A minimal sketch of this traversal is shown below.
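+
+As a minimal sketch, assuming an existing case database (the path below is a placeholder), the tree can be walked one level at a time:
+
+\code
+// Open an existing case database and walk one level down from each data source.
+SleuthkitCase skCase = SleuthkitCase.openCase("/cases/example/autopsy.db");
+for (DataSource ds : skCase.getDataSources()) {
+    System.out.println("Data source: " + ds.getName());
+    // Children may be volume systems, file systems, local files, reports, etc.
+    for (Content child : ds.getChildren()) {
+        System.out.println("  child: " + child.getName());
+    }
+}
+\endcode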
+
+You can add data sources using various SleuthkitCase methods, such as org.sleuthkit.datamodel.SleuthkitCase.makeAddImageProcess(). 
+
+
+\section ds_hosts Hosts
+All data sources must be associated with a org.sleuthkit.datamodel.Host. A host represents the device that the data source came from.  Some hosts will have only a single data source, for example when analyzing a computer with one hard drive. Other hosts may have multiple data sources, for example when analyzing a phone with an image of the handset and another image of a media card. 
+
+If you later learn that two data sources are from the same device, you can merge the hosts. 
+
+Hosts are managed from org.sleuthkit.datamodel.HostManager. 
+
+NOTE: Hosts are different from org.sleuthkit.datamodel.HostAddress. A Host is for devices that were seized and added to the case.  A HostAddress is for an address of any external host that was found during the analysis of a data source. For example, a HostAddress for "www.sleuthkit.org" could be created based on web history artifacts. 
+
+
+\section ds_person Persons
+You can optionally associate a host with a org.sleuthkit.datamodel.Person. This can allow you to more easily organize data in a large case. The concept is that you have multiple data sources representing different devices that are all owned or used by a given person. You can group that person's data sources together. 
+
+Persons are managed from org.sleuthkit.datamodel.PersonManager. 
+
+*/
diff --git a/bindings/java/doxygen/footer.html b/bindings/java/doxygen/footer.html
index de28741e239b3ac00796b7be6903c316ea092a8a..db83baa9051f977a4eb5ec10fd20eb1d4d7dba0d 100644
--- a/bindings/java/doxygen/footer.html
+++ b/bindings/java/doxygen/footer.html
@@ -1,5 +1,5 @@
 <hr/>
-<p><i>Copyright &#169; 2011-2020 Brian Carrier.  (carrier -at- sleuthkit -dot- org)<br/> 
+<p><i>Copyright &#169; 2011-2021 Brian Carrier.  (carrier -at- sleuthkit -dot- org)<br/> 
 This work is licensed under a
 <a rel="license" href="http://creativecommons.org/licenses/by-sa/3.0/us/">Creative Commons Attribution-Share Alike 3.0 United States License</a>.
 </i></p>
diff --git a/bindings/java/doxygen/main.dox b/bindings/java/doxygen/main.dox
index 45314c50f742d85bacc6315223297ac3f44f7de1..bc31b67c566e5b2a4ef93d07a29a5abfc2f8a40f 100644
--- a/bindings/java/doxygen/main.dox
+++ b/bindings/java/doxygen/main.dox
@@ -37,9 +37,18 @@ You can also access the data in its tree form by starting with org.sleuthkit.dat
 
 \section main_other Other Topics
 
+- \subpage mod_dspage describes data source organization 
+- \subpage mod_os_accounts_page
 - \subpage mod_bbpage is where analysis modules (such as those in Autopsy) can post and save their results. 
 - The \subpage artifact_catalog_page gives a list of the current artifacts and attributes used on \ref mod_bbpage.
 - \subpage mod_compage is where analysis modules can store and retrieve communications-related data. 
+
+\section main_db Database Topics
+The Sleuth Kit has its own database schema that is shared with Autopsy and other tools. The primary way it gets populated is via the Java code. 
+
+- Database Schema Documentation:
+ - \subpage db_schema_9_1_page 
+ - \subpage db_schema_page "Older schemas"
 - Refer to \subpage query_database_page if you are going to use one of the SleuthkitCase methods that requires you to specify a query. 
 - Refer to \subpage insert_and_update_database_page if you are a Sleuth Kit developer and want to avoid database issues.
 
diff --git a/bindings/java/doxygen/os_accounts.dox b/bindings/java/doxygen/os_accounts.dox
new file mode 100644
index 0000000000000000000000000000000000000000..dd15fc6026dc94537411f00306047f2a901d6f17
--- /dev/null
+++ b/bindings/java/doxygen/os_accounts.dox
@@ -0,0 +1,118 @@
+/*! \page mod_os_accounts_page OS Accounts and Realms
+
+\section os_acct_overview Overview
+
+This page outlines some of the core concepts around OS accounts and realms and how they are stored.
+OS accounts are unique data types in the TSK datamodel and are more complex than other types because
+we often do not fully understand the details when OS accounts are first created early in processing and
+need to update them at various points as analysis continues. 
+
+\section os_acct_basics Basic Terminology
+
+- An <b>OS account</b> allows a person to do some action or access some resource on a device. 
+- A <b>realm</b> is the scope in which the OS account is defined. A realm can be scoped to a single host (i.e., for accounts that exist only on a single host) or to a network domain (such as Windows domain accounts). 
+
+
+\section os_acct_challenges OS Account Challenges
+
+A key challenge with OS accounts is that we do not know the account information until we have started to parse files, and the more detailed information will only come from OS configuration files. It is also possible that we may never know the details if we have only a media card.  
+
+As a user adds a disk image to the case, we may learn about addresses from the files. But, we won't yet know the account name or if it is domain-scoped or local-scoped. So, the basic properties of the realm and account may change as more data is ingested and analyzed. This could even result in needing to merge realms and accounts.
+
+Another difference from other data types in the TSK data model is that OS accounts may span multiple data sources if they are domain accounts. Therefore, they are not "children" of a data source and exist outside of the usual tree model in TSK. 
+
+\section os_acct_realm OS Account Realms
+
+An org.sleuthkit.datamodel.OsAccountRealm represents the scope of a set of OS accounts. A realm's scope is defined by org.sleuthkit.datamodel.OsAccountRealm.RealmScope. By default, the scope is set to host-level and the org.sleuthkit.datamodel.OsAccountRealm.ScopeConfidence is set to inferred. As more is learned, the confidence and scope can be made more specific. 
+
+A realm has two core fields:
+- Address that the OS uses internally, such as part of a Windows SID
+- Name that is what users more often see
+
+When searching for realms, the address has priority over the name. With Windows systems, we often have a realm address from SIDs but not a specific realm name. 
+
+Realms are managed by org.sleuthkit.datamodel.OsAccountRealmManager.
+
+
+\section os_acct_acct OS Accounts
+
+An org.sleuthkit.datamodel.OsAccount represents an account that was configured in an operating system. It must be defined within the scope of an OsAccountRealm.  
+
+An OS account has two core fields:
+- Login name that the user enters (such as jdoe)
+- Address that the operating system uses internally (such as a UID of 0 or a Windows SID) 
+
+OS accounts also have other properties, such as full name, creation date, etc., that can be set after the account is created. 
+
+OS accounts are managed by org.sleuthkit.datamodel.OsAccountManager.
+
+\subsection os_acct_acct_os Supported Operating Systems
+
+At this point, APIs exist for only Windows accounts, such as: 
+- org.sleuthkit.datamodel.OsAccountManager.newWindowsOsAccount()
+- org.sleuthkit.datamodel.OsAccountManager.getWindowsOsAccount()
+
+The underlying database schema supports other operating systems, but the utility APIs do not exist to populate them other than with Windows SIDs. These methods may be added in the future.
+
+\section os_account_storing Storing Original Account Data
+
+We recommend that the OS account addresses or names that were parsed from the data source be saved alongside any references to OsAccount objects. For example, the case database stores the UID or SID that was stored in a file system for a file in addition to the reference to the OsAccount object that is associated with that address.  This helps to ensure the original data is preserved in case an OS account can't be created, gets deleted, or is incorrectly merged. 
+
+
+\section os_acct_example Example Creation & Update Code
+
+There are three unique elements to creating and updating OS accounts when adding data to the case database:
+
+<ol>
+<li>When creating and updating OS accounts in the case database, you need to avoid some pitfalls when doing a lot of work in a single transaction. Why? For single-user cases, if you have created an org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction, you should never call another database access method unless it allows you to pass in the CaseDbTransaction you are using. Otherwise, the method that you call will attempt to create its own transaction, and because you already have the underlying SQLite case database locked, the called method will block forever waiting for a lock it cannot obtain. For a multi-user case, you run the risk of attempting to create OS accounts in the case database that would duplicate accounts created by another user on another machine. In this scenario, uniqueness constraints will cause your entire transaction to fail, and everything you have done up to that point will be rolled back and will have to be redone.
+
+This means that if you want to use a CaseDbTransaction to add a lot of files or artifacts associated with OS accounts, you'll need to:
+<ol type="a">
+<li>Pre-process the data to identify what OS accounts you need to create or look up 
+<li>Look up or create the OS accounts in individual transactions 
+<li>Start a new transaction and add the files or artifacts with the references to the OS accounts
+</ol>
+
+<li>You need to check if you have more information than what is already stored (e.g., maybe the realm name was unknown).
+
+<li>You need to record that an OS account was referenced on a given data source because OS accounts are stored in parallel to data sources and are not children of them.
+</ol> 
+
+Here are some examples.
+
+\subsection os_acct_ex_get Adding a File or Data Artifact
+
+If you pass in an OsAccount to the various methods to add files and data artifacts, then the database will make the association and record the occurrence. All you need to do is get the account, which you can do with org.sleuthkit.datamodel.OsAccountManager.getWindowsOsAccount(). Note that this call will sometimes fail if the SID associated with the file is for a group, for example, if the OS account has admin rights. 
+
+If you get an OsAccount, you can try to update it if you think you may have new information. 
+
+Here is example pseudo-code:
+
+\code
+OsAccount osAcct = null;
+
+try {
+    Optional<OsAccount> osAcctOpt = getWindowsOsAccount("S-....", "jdoe", "ACME", host);
+    if (osAcctOpt.isPresent()) {
+            osAcct = osAcctOpt.get();
+            updateWindowsOsAccount(osAcct, "S-.....", "jdoe", "ACME", host);
+    }
+    else {
+            osAcct = newWindowsOsAccount("S-....", "jdoe", "ACME", host);
+    }
+}
+catch (NotUserSIDException ex) {
+    // Ignore this SID
+}
+
+// Pass in osAcct when making artifacts and files 
+\endcode
+
+\subsection os_acct_ex_update Parsing OS Configuration Data
+
+When parsing the Windows registry or other OS Configuration file, you may find updated information about OS accounts.  You can call various org.sleuthkit.datamodel.OsAccountManager methods to get and update the accounts.  When adding extended attributes, you can choose to limit the scope of the attribute to the single host being parsed or to the domain-level.  
+
+You should make sure to call org.sleuthkit.datamodel.OsAccountManager.newOsAccountInstance() to ensure it is recorded that there was at least some reference to the account on that data source. Otherwise, it will not be associated with the data source unless there were also files or artifacts that were mapped to the OS account. 
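+
+As a minimal sketch (the getOsAccountManager() accessor, the OsAccountInstanceType value, and the skCase, osAcct, and dataSource variables are assumptions for illustration):
+
+\code
+// Record that this OS account was referenced on this data source.
+OsAccountManager osAcctMgr = skCase.getOsAccountManager();
+osAcctMgr.newOsAccountInstance(osAcct, dataSource,
+        OsAccountInstance.OsAccountInstanceType.REFERENCED);
+\endcode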
+
+
+*/
diff --git a/bindings/java/doxygen/schema/db_schema_8_6.dox b/bindings/java/doxygen/schema/db_schema_8_6.dox
new file mode 100644
index 0000000000000000000000000000000000000000..83215b7315b6c5992154784294fcfdd05e4b4e84
--- /dev/null
+++ b/bindings/java/doxygen/schema/db_schema_8_6.dox
@@ -0,0 +1,378 @@
+/*! \page db_schema_8_6_page TSK & Autopsy Database Schema (Schema version 8.6)
+
+[TOC]
+
+# Introduction
+
+This page outlines version 8.6 of the database that is used by The Sleuth Kit and Autopsy. The goal of this page is to provide short descriptions for each table and column and not focus on foreign key requirements, etc. If you want that level of detail, then refer to the actual schema in addition to this. 
+
+You can find a basic graphic of some of the table relationships <a href="https://docs.google.com/drawings/d/1omR_uUAp1fQt720oJ-kk8C48BXmVa3PNjPZCDdT0Tb4/edit?usp#sharing">here</a>
+
+
+Some general notes on this schema:
+
+- Nearly every type of data is assigned a unique ID, called the Object ID
+- The objects form a hierarchy that shows where data came from.  A child comes from its parent.  
+ - For example, disk images are the root, with a volume system below it, then a file system, and then files and directories. 
+- This schema has been designed to store data beyond the file system data that The Sleuth Kit supports. It can store carved files, a folder full of local files, etc.
+- The Blackboard is used to store artifacts, which contain attributes (name/value pairs).  Artifacts are used to store data types that do not have more formal tables. Module writers can make whatever artifact types they want. See \ref mod_bbpage for more details. 
+- The Sleuth Kit will make virtual files to span the unallocated space.  They will have a naming format of 'Unalloc_[PARENT-OBJECT-ID]_[BYTE-START]_[BYTE-END]'.
+
+# Schema Information
+
+- <b>Autopsy versions: </b> Autopsy 4.18
+- <b>Changes from version 8.5:</b>
+ - New column for SHA-256 hash in tsk_files
+
+
+# General Information Tables 
+## tsk_db_info 
+Metadata about the database.
+- **schema_ver** - Major version number of the current database schema
+- **tsk_ver** - Version of TSK used to create database
+- **schema_minor_version** - Minor version number of the current database schema
+
+## tsk_db_info_extended
+Name and value pair table to store any information about the database, for example, which schema it was created with. 
+- **name** - Any string name
+- **value** - Any string value
+
+
+# Object Tables 
+## tsk_objects 
+Every object (image, volume system, file, etc.) has an entry in this table.  This table allows you to find the parent of a given object and allows objects to be tagged and have children.  This table provides items with a unique object id.  The details of the object are in other tables.  
+- **obj_id** - Unique id 
+- **par_obj_id** - The object id of the parent object (NULL for root objects). The parent of a volume system is an image, the parent of a directory is a directory or filesystem, the parent of a filesystem is a volume or an image, etc.
+- **type** - Object type (as org.sleuthkit.datamodel.TskData.ObjectType enum)
+
+
+# Data Source/Device Tables 
+## data_source_info
+Contains information about a data source, which could be an image.  This is where we group data sources into devices (based on device ID).
+- **obj_id** - Id of image/data source in tsk_objects
+- **device_id** - Unique ID (GUID) for the device that contains the data source
+- **time_zone** - Timezone that the data source was originally located in
+- **acquisition_details** - Notes on the acquisition of the data source
+
+
+# Disk Image Tables
+
+## tsk_image_info 
+Contains information about each set of images that is stored in the database. 
+- **obj_id** - Id of image in tsk_objects
+- **type** - Type of disk image format (as org.sleuthkit.datamodel.TskData.TSK_IMG_TYPE_ENUM)
+- **ssize** - Sector size of device in bytes
+- **tzone** - Timezone where image is from (the same format that TSK tools want as input)
+- **size** - Size of the original image (in bytes) 
+- **md5** - MD5 hash of the image (for compressed data such as E01, the hashes are of the decompressed image, not the E01 itself)
+- **sha1** - SHA-1 hash of the image
+- **sha256** - SHA-256 hash of the image
+- **display_name** - Display name of the image
+
+## tsk_image_names
+Stores path(s) to file(s) on disk that make up an image set.
+- **obj_id** - Id of image in tsk_objects
+- **name** - Path to location of image file on disk
+- **sequence** - Position in sequence of image parts
+
+
+# Volume System Tables
+## tsk_vs_info
+Contains one row for every volume system found in the images.
+- **obj_id** - Id of volume system in tsk_objects
+- **vs_type** - Type of volume system / media management (as org.sleuthkit.datamodel.TskData.TSK_VS_TYPE_ENUM)
+- **img_offset** - Byte offset where VS starts in disk image
+- **block_size** - Size of blocks in bytes
+
+## tsk_vs_parts
+Contains one row for every volume / partition in the images. 
+- **obj_id** - Id of volume in tsk_objects
+- **addr** - Address of the partition
+- **start** - Sector offset of start of partition
+- **length** - Number of sectors in partition
+- **desc** - Description of partition (volume system type-specific)
+- **flags** - Flags for partition (as org.sleuthkit.datamodel.TskData.TSK_VS_PART_FLAG_ENUM)
+
+## tsk_pool_info 
+Contains information about pools (for APFS, logical disk management, etc.)
+- **obj_id** - Id of pool in tsk_objects
+- **pool_type** - Type of pool (as org.sleuthkit.datamodel.TskData.TSK_POOL_TYPE_ENUM)
+
+# File System Tables
+## tsk_fs_info
+Contains one row for every file system in the images. 
+- **obj_id** - Id of filesystem in tsk_objects
+- **data_source_obj_id** - Id of the data source for the file system
+- **img_offset** - Byte offset that filesystem starts at
+- **fs_type** - Type of file system (as org.sleuthkit.datamodel.TskData.TSK_FS_TYPE_ENUM)
+- **block_size** - Size of each block (in bytes)
+- **block_count** - Number of blocks in filesystem
+- **root_inum** - Metadata address of root directory
+- **first_inum** - First valid metadata address
+- **last_inum** - Last valid metadata address
+- **display_name** - Display name of file system (could be volume label)
+
+## tsk_files
+Contains one row for every file found in the images.  Has the basic metadata for the file. 
+- **obj_id** - Id of file in tsk_objects
+- **fs_obj_id** - Id of filesystem in tsk_objects (NULL if file is not located in a file system -- carved in unpartitioned space, etc.)
+- **data_source_obj_id** - Id of the data source for the file
+- **attr_type** - Type of attribute (as org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM)
+- **attr_id** - Id of attribute
+- **name** - Name of attribute. Will be NULL if attribute doesn't have a name.  Must not have any slashes in it. 
+- **meta_addr** - Address of the metadata structure that the name points to
+- **meta_seq** - Sequence of the metadata address
+- **type** - Type of file: filesystem, carved, etc. (as org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM enum)
+- **has_layout** - True if file has an entry in tsk_file_layout
+- **has_path** - True if file has an entry in tsk_files_path
+- **dir_type** - File type information: directory, file, etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM)
+- **meta_type** - File type (as org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM)
+- **dir_flags** -  Flags that describe allocation status etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM)
+- **meta_flags** - Flags for the file for its allocation status etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM)
+- **size** - File size in bytes
+- **ctime** - Last file / metadata status change time (stored in number of seconds since Jan 1, 1970 UTC)
+- **crtime** - Created time
+- **atime** - Last file content accessed time
+- **mtime** - Last file content modification time
+- **mode** - Unix-style permissions (as org.sleuthkit.datamodel.TskData.TSK_FS_META_MODE_ENUM)
+- **uid** - Owner id
+- **gid** - Group id
+- **md5** - MD5 hash of file contents
+- **sha256** - SHA-256 hash of file contents
+- **known** - Known status of file (as org.sleuthkit.datamodel.TskData.FileKnown)
+- **parent_path** - Full path of parent folder. Must begin and end with a '/' (Note that a single '/' is valid)
+- **mime_type** - MIME type of the file content, if it has been detected. 
+- **extension** - File extension
+
+## tsk_file_layout
+Stores the layout of a file within the image.  A file will have one or more rows in this table depending on how fragmented it was. All file types use this table (file system, carved, unallocated blocks, etc.).
+- **obj_id** - Id of file in tsk_objects
+- **sequence** - Position of the run in the file (0-based and the obj_id and sequence pair will be unique in the table)
+- **byte_start** - Byte offset of fragment relative to the start of the image file
+- **byte_len** - Length of fragment in bytes
+
+
+## tsk_files_path
+If a "locally-stored" file has been imported into the database for analysis, then this table stores its path.  Used for derived files and other files that are not directly in the image file.
+- **obj_id** - Id of file in tsk_objects
+- **path** - Path to where the file is locally stored in a file system
+- **encoding_type** - Method used to store the file on the disk 
+
+## file_encoding_types 
+Methods that can be used to store files on local disks to prevent them from being quarantined by antivirus
+- **encoding_type** - ID of method used to store data.  See org.sleuthkit.datamodel.TskData.EncodingType enum 
+- **name** -  Display name of technique
+
+## tsk_files_derived_method
+Derived files are those that result from analyzing another file.  For example, files that are extracted from a ZIP file will be considered derived.  This table keeps track of the derivation techniques that were used to make the derived files. 
+
+NOTE: This table is not used in any code.
+
+- **derived_id** - Unique id for the derivation method. 
+- **tool_name** - Name of derivation method/tool
+- **tool_version** - Version of tool used in derivation method
+- **other** - Other details
+
+## tsk_files_derived
+Each derived file has a row that captures the information needed to re-derive it
+
+NOTE: This table is not used in any code.
+
+- **obj_id** - Id of file in tsk_objects
+- **derived_id** - Id of derivation method in tsk_files_derived_method
+- **rederive** - Details needed to re-derive file (will be specific to the derivation method)
+
+
+# Blackboard Tables 
+The \ref mod_bbpage "Blackboard" is used to store results from analysis modules. 
+
+## blackboard_artifacts
+Stores artifacts associated with objects.
+- **artifact_id** - Id of the artifact (assigned by the database)
+- **obj_id** - Id of the associated object
+- **artifact_type_id** - Id for the type of artifact (references artifact_type_id in the blackboard_artifact_types table)
+- **data_source_obj_id** - Id of the data source for the artifact
+- **review_status_id** - Review status (references review_status_id in review_statuses)
+
+## blackboard_attributes
+Stores name value pairs associated with an artifact. Only one of the value columns should be populated.
+- **artifact_id** - Id of the associated artifact
+- **artifact_type_id** - Artifact type of the associated artifact
+- **source** - Source string, should be module name that created the entry
+- **context** - Additional context string
+- **attribute_type_id** - Id for the type of attribute (can be looked up in the blackboard_attribute_types)
+- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
+- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
+- **value_text** - A string of text (should be NULL unless the value type is string)
+- **value_int32** - An integer (should be NULL unless the value type is int)
+- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
+- **value_double** - A double (should be NULL unless the value type is double)
+
+## blackboard_artifact_types
+Types of artifacts
+- **artifact_type_id** - Id for the type (this is used by the blackboard_artifacts table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+
+## blackboard_attribute_types
+Types of attributes
+- **attribute_type_id** - Id for the type (this is used by the blackboard_attributes table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+- **value_type** - Expected type of data for the attribute type (see blackboard_attributes)
+
+## review_statuses
+Review status of an artifact. Should mirror the org.sleuthkit.datamodel.BlackboardArtifact.ReviewStatus enum.
+- **review_status_id** - Id of the status 
+- **review_status_name** - Internal name of the status
+- **display_name** - Display name (should be human readable)
+
+
+# Communication Accounts
+Stores data related to communications between two parties. It is highly recommended to use 
+the org.sleuthkit.datamodel.CommunicationsManager API to create/access this type of data
+(see the \ref mod_compage page).
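+
+A hedged sketch of recording a message relationship through org.sleuthkit.datamodel.CommunicationsManager (the phone numbers and module name are made up, and msgArtifact is assumed to be an existing message artifact; check the CommunicationsManager javadoc for the exact signatures):
+
+```java
+import java.util.Collections;
+import org.sleuthkit.datamodel.*;
+
+// Sketch: record that two phone numbers exchanged a message found in a source file.
+static void addMessageRelationship(SleuthkitCase caseDb, Content sourceFile,
+        BlackboardArtifact msgArtifact, long msgTime) throws TskCoreException, TskDataException {
+    CommunicationsManager comms = caseDb.getCommunicationsManager();
+    AccountFileInstance sender = comms.createAccountFileInstance(
+            Account.Type.PHONE, "+15551230001", "ExampleModule", sourceFile);
+    AccountFileInstance recipient = comms.createAccountFileInstance(
+            Account.Type.PHONE, "+15551230002", "ExampleModule", sourceFile);
+    comms.addRelationships(sender, Collections.singletonList(recipient),
+            msgArtifact, Relationship.Type.MESSAGE, msgTime);
+}
+```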
+
+## accounts
+Stores communication accounts (email, phone number, etc.).  Note that this does not include OS accounts. 
+- **account_id** - Id for the account within the scope of the database (i.e. Row Id) (used in the account_relationships table)
+- **account_type_id** - The type of account (must match an account_type_id entry from the account_types table)
+- **account_unique_identifier** - The phone number/email/other identifier associated with the account that is unique within the Account Type 
+
+## account_types
+Types of accounts and service providers (Phone, email, Twitter, Facebook, etc.)
+- **account_type_id** - Id for the type (this is used by the accounts table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+
+## account_relationships
+Stores non-directional relationships between two accounts if they communicated or had references to each other (such as contact book)
+- **relationship_id** -  Id for the relationship
+- **account1_id** - Id of the first participant (from account_id column in accounts table)
+- **account2_id** - Id of the second participant (from account_id column in accounts table)
+- **relationship_source_obj_id** - Id of the artifact this relationship was derived from (artifact_id column from the blackboard_artifacts)
+- **date_time** - Time the communication took place, stored in number of seconds since Jan 1, 1970 UTC (NULL if unknown)
+- **relationship_type** - The type of relationship (as org.sleuthkit.datamodel.Relationship.Type)
+- **data_source_obj_id** - Id of the data source this relationship came from (from obj_id in data_source_info)
+
+# Timeline
+Stores data used to populate various timelines. Two tables are used to reduce data duplication. It is highly recommended to use 
+the org.sleuthkit.datamodel.TimelineManager API to create/access this type of data.  
+
+## tsk_event_types
+Stores the types for events. The super_type_id column is used to arrange the types into a tree.
+- **event_type_id** - Id for the type
+- **display_name** - Display name for the type (unique, should be human readable)
+- **super_type_id** - Parent type for the type (used for building the hierarchy; references the event_type_id in this table)
+
+## tsk_event_descriptions
+Stores descriptions of an event. This table exists to reduce duplicate data that is common to events. For example, a file will have only one row in tsk_event_descriptions, but could have 4+ rows in tsk_events that all refer to the same description. Note that the combination of the full_description, content_obj_id, and artifact_id columns must be unique.
+- **event_description_id** - Id for the event description
+- **full_description** - Full length description of the event (required).  For example, the full file path including file name. 
+- **med_description** - Medium length description of the event (may be null).  For example, a file may have only the first three folder names.
+- **short_description** - Short length description of the event (may be null).  For example, a file may have only its first folder name. 
+- **data_source_obj_id** -  Object id of the data source for the event source (references obj_id column in data_source_info)
+- **content_obj_id** - If the event is from a non-artifact, then this is the object id from that source.  If the event is from an artifact, then this is the object id of the artifact's source. (references obj_id column in tsk_objects)
+- **artifact_id** - If the event is from a non-artifact, this is null. If the event is from an artifact, then this is the id of the artifact (references artifact_id column in blackboard_artifacts) (may be null)
+- **hash_hit** - 1 if the file associated with the event has a hash set hit, 0 otherwise
+- **tagged** - 1 if the direct source of the event has been tagged, 0 otherwise
+
+## tsk_events
+Stores each event. A file, artifact, or other type of content can have several rows in this table, one for each time stamp.
+- **event_id** - Id for the event
+- **event_type_id** - Event type id (references event_type_id column in tsk_event_types)
+- **event_description_id** - Event description id (references event_description_id column in tsk_event_descriptions)
+- **time** -  Time the event occurred, in seconds from the UNIX epoch
+
+# Examiners and Reports
+
+## tsk_examiners
+Encapsulates the concept of an examiner associated with a case.
+- **examiner_id** - Id for the examiner
+- **login_name** - Login name for the examiner (must be unique)
+- **display_name** - Display name for the examiner (may be null)
+
+## reports
+Stores information on generated reports.
+- **obj_id** - Id of the report
+- **path** - Full path to the report (including file name)
+- **crtime** - Time the report was created, in seconds from the UNIX epoch
+- **src_module_name** - Name of the module that created the report
+- **report_name** - Name of the report (can be empty string)
+
+# Tags 
+
+## tag_names
+Defines what tag names the user has created and can therefore be applied.
+- **tag_name_id** - Unique ID for each tag name
+- **display_name** - Display name of tag
+- **description**  - Description  (can be empty string)
+- **color** - Color choice for tag (can be empty string)
+- **knownStatus** - Stores whether a tag is notable/bad (as org.sleuthkit.datamodel.TskData.FileKnown enum)
+- **tag_set_id** - Id of the tag set the tag name belongs to (references tag_set_id in tsk_tag_sets, may be null)
+- **rank** - Used to order the tag names for a given tag set for display purposes
+
+## tsk_tag_sets
+Used to group entries from the tag_names table. An object can have only one tag from a tag set at a time. 
+- **tag_set_id** - Id of the tag set
+- **name** - Name of the tag set (unique, should be human readable)
+
+## content_tags
+One row for each file tagged.  
+- **tag_id** - unique ID
+- **obj_id** - object id of Content that has been tagged
+- **tag_name_id** - Tag name that was used
+- **comment**  - optional comment 
+- **begin_byte_offset** - optional byte offset into file that was tagged
+- **end_byte_offset** - optional byte ending offset into file that was tagged
+- **examiner_id** - Examiner that tagged the content (references examiner_id in tsk_examiners)
+
+## blackboard_artifact_tags
+One row for each artifact that is tagged.
+- **tag_id** - unique ID
+- **artifact_id** - Artifact ID of artifact that was tagged
+- **tag_name_id** - Tag name that was used
+- **comment** - Optional comment
+- **examiner_id** - Examiner that tagged the artifact (references examiner_id in tsk_examiners)
+
+
+# Ingest Module Status
+These tables keep track of which Autopsy ingest modules were run on the data sources.
+
+## ingest_module_types
+Defines the types of ingest modules supported. Must exactly match the names and ordering in the org.sleuthkit.datamodel.IngestModuleInfo.IngestModuleType enum.
+- **type_id** - Id for the ingest module type
+- **type_name** - Internal name for the ingest module type
+
+## ingest_modules
+Defines which modules were installed and run on at least one data source.  One row for each module. 
+- **ingest_module_id** - Id of the ingest module
+- **display_name** - Display name for the ingest module (should be human readable)
+- **unique_name** - Unique name for the ingest module
+- **type_id** - Type of ingest module (references type_id from ingest_module_types)
+- **version** - Version of the ingest module
+
+## ingest_job_status_types
+Defines the status options for ingest jobs. Must match the names and ordering in the org.sleuthkit.datamodel.IngestJobInfo.IngestJobStatusType enum.
+- **type_id** - Id for the ingest job status type
+- **type_name** - Internal name for the ingest job status type
+
+##  ingest_jobs
+One row is created each time ingest (a set of modules in a pipeline) is started on a data source.
+- **ingest_job_id** - Id of the ingest job
+- **obj_id** - Id of the data source ingest is being run on
+- **host_name** - Name of the host that is running the ingest job
+- **start_date_time** - Time the ingest job started (stored in number of milliseconds since Jan 1, 1970 UTC)
+- **end_date_time** - Time the ingest job finished (stored in number of milliseconds since Jan 1, 1970 UTC)
+- **status_id** - Ingest job status (references type_id from ingest_job_status_types)
+- **settings_dir** - Directory of the job's settings (may be an empty string)
+
+##  ingest_job_modules
+Defines the order of the modules in a given pipeline (i.e. ingest_job).
+- **ingest_job_id** - Id for the ingest job (references ingest_job_id in ingest_jobs)
+- **ingest_module_id** - Id of the ingest module (references ingest_module_id in ingest_modules)
+- **pipeline_position** - Order that the ingest module was run
+
+
+*/
diff --git a/bindings/java/doxygen/schema/db_schema_9_0.dox b/bindings/java/doxygen/schema/db_schema_9_0.dox
new file mode 100644
index 0000000000000000000000000000000000000000..182b6c797bbb50f7923710491815f21d3d69df39
--- /dev/null
+++ b/bindings/java/doxygen/schema/db_schema_9_0.dox
@@ -0,0 +1,7 @@
+/*! \page db_schema_9_0_page TSK & Autopsy Database Schema (Schema version 9.0)
+
+Schema 9.0 is not associated with a released version of Autopsy and is almost the same as schema 9.1. 
+
+Please see the \ref db_schema_9_1_page page for all changes from schema 8.6 to schema 9.0.
+
+*/
diff --git a/bindings/java/doxygen/schema/db_schema_9_1.dox b/bindings/java/doxygen/schema/db_schema_9_1.dox
new file mode 100644
index 0000000000000000000000000000000000000000..0c7a48a203e5d1cea49d99636d7d12a2f9562f0e
--- /dev/null
+++ b/bindings/java/doxygen/schema/db_schema_9_1.dox
@@ -0,0 +1,543 @@
+/*! \page db_schema_9_1_page TSK & Autopsy Database Schema (Schema version 9.1)
+
+[TOC]
+
+# Introduction
+
+This page outlines version 9.1 of the database that is used by The Sleuth Kit and Autopsy. The goal of this page is to provide short descriptions for each table and column and not focus on foreign key requirements, etc. If you want that level of detail, then refer to the actual schema in addition to this. 
+
+Each Autopsy release is associated with a schema version with a major and minor version number. If a case with an older schema version is opened in a new version of Autopsy, the case will automatically be updated to the current schema. Going the other direction (opening a case that was created with a newer version of Autopsy), two things may happen:
+- If the case database has the same major number as the version of Autopsy being used, the case should generally be able to be opened and used.
+- If the case database has a higher major number than the version of Autopsy being used, an error will be displayed when attempting to open the case. 
+
+You can find a basic graphic of some of the table relationships <a href="https://docs.google.com/drawings/d/1omR_uUAp1fQt720oJ-kk8C48BXmVa3PNjPZCDdT0Tb4/edit?usp#sharing">here</a>.
+
+
+Some general notes on this schema:
+- Nearly every type of data is assigned a unique ID, called the Object ID
+- The objects form a hierarchy that shows where data came from: a child comes from its parent (see the sketch after this list).
+ - For example, disk images are the root, with a volume system below it, then a file system, and then files and directories. 
+- This schema has been designed to store data beyond the file system data that The Sleuth Kit supports. It can store carved files, a folder full of local files, etc.
+- The Blackboard is used to store artifacts, which contain attributes (name/value pairs).  Artifacts are used to store data types that do not have more formal tables. Module writers can make whatever artifact types they want. See \ref mod_bbpage for more details. 
+- The Sleuth Kit will make virtual files to span the unallocated space.  They will have a naming format of 'Unalloc_[PARENT-OBJECT-ID]_[BYTE-START]_[BYTE-END]'.
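+
+As a rough illustration (not part of the schema itself), the hierarchy can be walked through org.sleuthkit.datamodel.Content in the Java bindings; the sketch below assumes a root object such as an Image returned by SleuthkitCase.getImages():
+
+```java
+import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.TskCoreException;
+
+// Sketch: recursively print object ids and names, indented by depth in the hierarchy.
+static void printTree(Content obj, int depth) throws TskCoreException {
+    StringBuilder indent = new StringBuilder();
+    for (int i = 0; i < depth; i++) {
+        indent.append("  ");
+    }
+    System.out.println(indent.toString() + obj.getId() + " " + obj.getName());
+    for (Content child : obj.getChildren()) {
+        printTree(child, depth + 1);
+    }
+}
+```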
+
+# Schema Information
+
+This was a big change. Tables were added to support analysis results, OS accounts, hosts and person structure of data sources, and host addresses (IPs, DNS, etc.). The major component of the version number has been incremented because there are new org.sleuthkit.datamodel.TskData.ObjectType enum types (OsAccount and HostAddress). More information on how to use these new objects can be found on the \ref mod_dspage and \ref mod_os_accounts_page pages.
+
+<ul>
+<li><b>Autopsy versions: </b> Autopsy 4.19
+<li><b>Changes from version 8.6:</b>
+<ul>
+<li> New columns:
+<ul>
+<li>host_id, added_date_time, acquisition_tool_settings, acquisition_tool_name, acquisition_tool_version in data_source_info
+<li>category_type in artifact_types
+<li>owner_uid, os_account_obj_id in tsk_files
+</ul>
+<li> New tables:
+<ul>
+<li>tsk_aggregate_score
+<li>tsk_analysis_results
+<li>tsk_data_artifacts
+<li>tsk_file_attributes
+<li>tsk_hosts
+<li>tsk_host_addresses
+<li>tsk_host_address_dns_ip_map
+<li>tsk_host_address_usage
+<li>tsk_os_accounts
+<li>tsk_os_account_attributes
+<li>tsk_os_account_instances
+<li>tsk_os_account_realms
+<li>tsk_persons
+</ul>
+</ul>
+</ul>
+
+
+# General Information Tables 
+## tsk_db_info 
+Metadata about the database.
+- **schema_ver** - Major version number of the current database schema
+- **tsk_ver** - Version of TSK used to create database
+- **schema_minor_version** - Minor version number of the current database schema
+
+## tsk_db_info_extended
+Name and value pair table to store any information about the database, such as the schema version it was created with.
+- **name** - Any string name
+- **value** - Any string value
+
+
+# Object Tables 
+## tsk_objects 
+Every object (image, volume system, file, etc.) has an entry in this table.  This table allows you to find the parent of a given object and allows objects to be tagged and have children.  This table provides items with a unique object id.  The details of the object are in other tables.  
+- **obj_id** - Unique id 
+- **par_obj_id** - The object id of the parent object (NULL for root objects). The parent of a volume system is an image, the parent of a directory is a directory or filesystem, the parent of a filesystem is a volume or an image, etc.
+- **type** - Object type (as org.sleuthkit.datamodel.TskData.ObjectType enum)
+
+
+# Hosts / Persons
+Stores data related to hosts and persons, which can help organize data sources. 
+
+## tsk_persons
+Stores persons for the case. A person is someone who owned or used a data source in the case. 
+- **id** - Id of the person
+- **name** - Name of the person (should be human readable)
+
+## tsk_hosts
+Stores hosts that have a data source in the case. Each data source must be associated with a host.  These are NOT created for a reference to an external host (such as a web domain). 
+- **id** - Id of the host
+- **name** - Name of the host (should be human readable)
+- **db_status** - Status of the host (active/merged/deleted as org.sleuthkit.datamodel.Host.HostDbStatus)
+- **person_id** - Optional id of associated person
+- **merged_into** - Stores the host ID that this host was merged into
+
+# Data Source / Device Tables 
+## data_source_info
+Contains information about a data source, which could be an image.  This is where we group data sources into devices (based on device ID).
+- **obj_id** - Id of image/data source in tsk_objects
+- **device_id** - Unique ID (GUID) for the device that contains the data source
+- **time_zone** - Timezone that the data source was originally located in
+- **acquisition_details** - Notes on the acquisition of the data source
+- **added_date_time** - Timestamp of when the data source was added
+- **acquisition_tool_name** - Name of the tool used to acquire the image
+- **acquisition_tool_settings** - Specific settings used by the tool to acquire the image
+- **acquisition_tool_version** - Version of the acquisition tool
+- **host_id** - Host associated with this image (must be set)
+
+
+# Disk Image Tables
+
+## tsk_image_info 
+Contains information about each set of images that is stored in the database. 
+- **obj_id** - Id of image in tsk_objects
+- **type** - Type of disk image format (as org.sleuthkit.datamodel.TskData.TSK_IMG_TYPE_ENUM)
+- **ssize** - Sector size of device in bytes
+- **tzone** - Timezone where image is from (the same format that TSK tools want as input)
+- **size** - Size of the original image (in bytes) 
+- **md5** - MD5 hash of the image (for compressed data such as E01, the hashes are of the decompressed image, not the E01 itself)
+- **sha1** - SHA-1 hash of the image
+- **sha256** - SHA-256 hash of the image
+- **display_name** - Display name of the image
+
+## tsk_image_names
+Stores path(s) to file(s) on disk that make up an image set.
+- **obj_id** - Id of image in tsk_objects
+- **name** - Path to location of image file on disk
+- **sequence** - Position in sequence of image parts
+
+
+# Volume System Tables
+## tsk_vs_info
+Contains one row for every volume system found in the images.
+- **obj_id** - Id of volume system in tsk_objects
+- **vs_type** - Type of volume system / media management (as org.sleuthkit.datamodel.TskData.TSK_VS_TYPE_ENUM)
+- **img_offset** - Byte offset where VS starts in disk image
+- **block_size** - Size of blocks in bytes
+
+## tsk_vs_parts
+Contains one row for every volume / partition in the images. 
+- **obj_id** - Id of volume in tsk_objects
+- **addr** - Address of the partition
+- **start** - Sector offset of start of partition
+- **length** - Number of sectors in partition
+- **desc** - Description of partition (volume system type-specific)
+- **flags** - Flags for partition (as org.sleuthkit.datamodel.TskData.TSK_VS_PART_FLAG_ENUM)
+
+## tsk_pool_info 
+Contains information about pools (for APFS, logical disk management, etc.)
+- **obj_id** - Id of pool in tsk_objects
+- **pool_type** - Type of pool (as org.sleuthkit.datamodel.TskData.TSK_POOL_TYPE_ENUM)
+
+# File System Tables
+## tsk_fs_info
+Contains one row for every file system in the images. 
+- **obj_id** - Id of filesystem in tsk_objects
+- **data_source_obj_id** - Id of the data source for the file system
+- **img_offset** - Byte offset that filesystem starts at
+- **fs_type** - Type of file system (as org.sleuthkit.datamodel.TskData.TSK_FS_TYPE_ENUM)
+- **block_size** - Size of each block (in bytes)
+- **block_count** - Number of blocks in filesystem
+- **root_inum** - Metadata address of root directory
+- **first_inum** - First valid metadata address
+- **last_inum** - Last valid metadata address
+- **display_name** - Display name of file system (could be volume label)
+
+## tsk_files
+Contains one row for every file found in the images.  Has the basic metadata for the file. 
+- **obj_id** - Id of file in tsk_objects
+- **fs_obj_id** - Id of filesystem in tsk_objects (NULL if file is not located in a file system -- carved in unpartitioned space, etc.)
+- **data_source_obj_id** - Id of the data source for the file
+- **attr_type** - Type of attribute (as org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM)
+- **attr_id** - Id of attribute
+- **name** - Name of attribute. Will be NULL if attribute doesn't have a name.  Must not have any slashes in it. 
+- **meta_addr** - Address of the metadata structure that the name points to
+- **meta_seq** - Sequence of the metadata address
+- **type** - Type of file: filesystem, carved, etc. (as org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM enum)
+- **has_layout** - True if file has an entry in tsk_file_layout
+- **has_path** - True if file has an entry in tsk_files_path
+- **dir_type** - File type information: directory, file, etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_NAME_TYPE_ENUM)
+- **meta_type** - File type (as org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM)
+- **dir_flags** -  Flags that describe allocation status etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM)
+- **meta_flags** - Flags for the file for its allocation status etc. (as org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM)
+- **size** - File size in bytes
+- **ctime** - Last file / metadata status change time (stored in number of seconds since Jan 1, 1970 UTC)
+- **crtime** - Created time
+- **atime** - Last file content accessed time
+- **mtime** - Last file content modification time
+- **mode** - Unix-style permissions (as org.sleuthkit.datamodel.TskData.TSK_FS_META_MODE_ENUM)
+- **uid** - Owner id
+- **gid** - Group id
+- **md5** - MD5 hash of file contents
+- **sha256** - SHA-256 hash of file contents
+- **known** - Known status of file (as org.sleuthkit.datamodel.TskData.FileKnown)
+- **parent_path** - Full path of parent folder. Must begin and end with a '/' (Note that a single '/' is valid)
+- **mime_type** - MIME type of the file content, if it has been detected. 
+- **extension** - File extension
+- **owner_uid** - Unique ID of the owner (SID in Windows)
+- **os_account_obj_id** - ID of optional associated OS account
+
+## tsk_file_layout
+Stores the layout of a file within the image.  A file will have one or more rows in this table depending on how fragmented it was. All file types use this table (file system, carved, unallocated blocks, etc.).
+- **obj_id** - Id of file in tsk_objects
+- **sequence** - Position of the run in the file (0-based and the obj_id and sequence pair will be unique in the table)
+- **byte_start** - Byte offset of fragment relative to the start of the image file
+- **byte_len** - Length of fragment in bytes
+
+
+## tsk_files_path
+If a "locally-stored" file has been imported into the database for analysis, then this table stores its path.  Used for derived files and other files that are not directly in the image file.
+- **obj_id** - Id of file in tsk_objects
+- **path** - Path to where the file is locally stored in a file system
+- **encoding_type** - Method used to store the file on the disk 
+
+## file_encoding_types 
+Methods that can be used to store files on local disks to prevent them from being quarantined by antivirus
+- **encoding_type** - ID of method used to store data.  See org.sleuthkit.datamodel.TskData.EncodingType enum 
+- **name** -  Display name of technique
+
+## tsk_file_attributes
+Stores extended attributes for a particular file that do not have a column in tsk_files. Custom BlackboardAttribute types can be defined. 
+- **id** - Id of the attribute
+- **obj_id** - File this attribute is associated with (references tsk_files)
+- **attribute_type_id** - Id for the type of attribute (can be looked up in the blackboard_attribute_types)
+- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
+- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
+- **value_text** - A string of text (should be NULL unless the value type is string)
+- **value_int32** - An integer (should be NULL unless the value type is int)
+- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
+- **value_double** - A double (should be NULL unless the value type is double)
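+
+A hedged sketch of attaching a custom attribute to a file. It assumes the org.sleuthkit.datamodel.Attribute class added alongside this table and an AbstractFile.addAttributes() method; both names should be verified against the current datamodel API:
+
+```java
+import java.util.Collections;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Attribute;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.TskCoreException;
+
+// Sketch (assumed API): store a comment attribute for a file in tsk_file_attributes.
+static void addFileComment(AbstractFile file) throws TskCoreException {
+    Attribute comment = new Attribute(
+            new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT),
+            "seen in backup set 3");
+    file.addAttributes(Collections.singletonList(comment));
+}
+```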
+
+## tsk_files_derived_method
+Derived files are those that result from analyzing another file.  For example, files that are extracted from a ZIP file will be considered derived.  This table keeps track of the derivation techniques that were used to make the derived files. 
+
+NOTE: This table is not used in any code.
+
+- **derived_id** - Unique id for the derivation method. 
+- **tool_name** - Name of derivation method/tool
+- **tool_version** - Version of tool used in derivation method
+- **other** - Other details
+
+## tsk_files_derived
+Each derived file has a row that captures the information needed to re-derive it
+
+NOTE: This table is not used in any code.
+
+- **obj_id** - Id of file in tsk_objects
+- **derived_id** - Id of derivation method in tsk_files_derived_method
+- **rederive** - Details needed to re-derive file (will be specific to the derivation method)
+
+
+# Blackboard Tables 
+The \ref mod_bbpage "Blackboard" is used to store results and derived data from analysis modules. 
+
+## blackboard_artifacts
+Stores artifacts associated with objects. 
+- **artifact_id** - Id of the artifact (assigned by the database)
+- **obj_id** - Id of the associated object
+- **artifact_obj_id** - Object id of the artifact
+- **artifact_type_id** - Id for the type of artifact (references artifact_type_id in the blackboard_artifact_types table)
+- **data_source_obj_id** - Id of the data source for the artifact
+- **review_status_id** - Review status (references review_status_id in review_statuses)
+
+## tsk_analysis_results
+Additional information for artifacts that are analysis results
+- **artifact_obj_id** - Object id of the associated artifact (artifact_obj_id column in blackboard_artifacts)
+- **significance** - Significance to show if the result shows the object is relevant (as org.sleuthkit.datamodel.Score.Significance enum)
+- **method_category** - Category of the analysis method used (as org.sleuthkit.datamodel.Score.MethodCategory enum)
+- **conclusion** - Optional, text description of the conclusion of the analysis method. 
+- **configuration** - Optional, text description of the analysis method configuration (such as what hash set or keyword list was used)
+- **justification** - Optional, text description of justification of the conclusion and significance. 
+- **ignore_score** - True (1) if score should be ignored when calculating aggregate score, false (0) otherwise. This allows users to ignore a false positive.
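+
+A hedged sketch of creating an analysis result with a score. The Content.newAnalysisResult() signature and the Score, Significance, and MethodCategory constants shown are assumptions; check the current org.sleuthkit.datamodel javadoc before relying on them:
+
+```java
+import java.util.Collections;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.Score;
+import org.sleuthkit.datamodel.TskCoreException;
+
+// Sketch (assumed API): flag a file as notable based on an automated hash-set match.
+static void flagFile(AbstractFile file, BlackboardArtifact.Type hashHitType) throws TskCoreException {
+    Score score = new Score(Score.Significance.NOTABLE, Score.MethodCategory.AUTO);
+    file.newAnalysisResult(hashHitType, score,
+            "Matched known-bad hash set",     // conclusion
+            "example-hash-set",               // configuration
+            "MD5 hash matched a set entry",   // justification
+            Collections.<BlackboardAttribute>emptyList());
+}
+```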
+
+## tsk_data_artifacts
+Additional information for artifacts that store extracted data. 
+- **artifact_obj_id** - Object id of the associated artifact (artifact_obj_id column in blackboard_artifacts)
+- **os_account_obj_id** - Object id of the associated OS account
+
+## blackboard_artifact_types
+Types of artifacts
+- **artifact_type_id** - Id for the type (this is used by the blackboard_artifacts table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+- **category_type** - Indicates whether this is a data artifact or an analysis result
+
+## blackboard_attributes
+Stores name value pairs associated with an artifact. Only one of the value columns should be populated.
+- **artifact_id** - Id of the associated artifact
+- **artifact_type_id** - Artifact type of the associated artifact
+- **source** - Source string, should be module name that created the entry
+- **context** - Additional context string
+- **attribute_type_id** - Id for the type of attribute (can be looked up in the blackboard_attribute_types)
+- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
+- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
+- **value_text** - A string of text (should be NULL unless the value type is string)
+- **value_int32** - An integer (should be NULL unless the value type is int)
+- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
+- **value_double** - A double (should be NULL unless the value type is double)
+
+## blackboard_attribute_types
+Types of attributes
+- **attribute_type_id** - Id for the type (this is used by the blackboard_attributes table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+- **value_type** - Expected type of data for the attribute type (see blackboard_attributes)
+
+## review_statuses
+Review status of an artifact. Should mirror the org.sleuthkit.datamodel.BlackboardArtifact.ReviewStatus enum.
+- **review_status_id** - Id of the status 
+- **review_status_name** - Internal name of the status
+- **display_name** - Display name (should be human readable)
+
+## tsk_aggregate_score
+Stores the score of an object that is a combination of the various analysis result scores
+- **obj_id** - Id of the object that corresponds to this score
+- **data_source_obj_id** - Id of the data source the object belongs to
+- **significance** - Significance (as org.sleuthkit.datamodel.Score.Significance enum)
+- **method_category** - Category of the method used (as org.sleuthkit.datamodel.Score.MethodCategory enum)
+
+
+
+# Host Addresses
+Host addresses are various forms of identifiers assigned to a computer, such as host names or MAC addresses. These tables store data that is also stored in the data artifacts, but these tables allow for correlation and scoring of specific hosts. 
+
+## tsk_host_addresses
+One entry is created in this table for each host address found in the data source.  Examples include domain names (www.sleuthkit.org), IP addresses, and BlueTooth MAC addresses.
+- **id** - Id of the host address
+- **address_type** - Type of address (as org.sleuthkit.datamodel.HostAddress.HostAddressType enum)
+- **address** - Address (must be unique within the scope of address_type). 
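+
+A hedged sketch of adding host addresses through a HostAddressManager; the manager accessor, newHostAddress() method, and enum constant names are assumptions to verify against the current datamodel API:
+
+```java
+import org.sleuthkit.datamodel.HostAddress;
+import org.sleuthkit.datamodel.HostAddressManager;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+
+// Sketch (assumed API): add a DNS name and an IPv4 address to tsk_host_addresses.
+static void recordAddresses(SleuthkitCase caseDb) throws TskCoreException {
+    HostAddressManager mgr = caseDb.getHostAddressManager();
+    HostAddress dnsName = mgr.newHostAddress(HostAddress.HostAddressType.HOSTNAME, "www.sleuthkit.org");
+    HostAddress ipAddr = mgr.newHostAddress(HostAddress.HostAddressType.IPV4, "192.0.2.10");
+}
+```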
+
+## tsk_host_address_dns_ip_map
+Stores host name to IP address mappings that were observed when one was resolved to the other. 
+- **id** - Id of the mapping
+- **dns_address_id** - Id of the DNS address in tsk_host_addresses
+- **ip_address_id** - Id of the IP address in tsk_host_addresses
+- **source_obj_id** - Id of the object used to determine this mapping (references tsk_objects)
+- **time** - Timestamp when this mapping was recorded
+
+## tsk_host_address_usage
+Tracks which artifacts and files had a reference to a given host address. This is used to show what other artifacts used the same address. 
+- **id** - Id of the usage
+- **addr_obj_id** - Id of the host address
+- **obj_id** - Id of the object that had a reference/usage to the address (references tsk_objects)
+- **data_source_obj_id** - Id of the data source associated with the usage
+
+
+# Operating System Accounts
+Stores data related to operating system accounts.  Communication-related accounts (such as email or social media) are stored in other tables (see Communication Accounts below).
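+
+A hedged sketch of creating a Windows OS account through an OsAccountManager (the realm is resolved from the SID and domain name internally). The method name, exception type, and RealmScope constant are assumptions to verify against the current datamodel API; the SID, login name, and domain are made up:
+
+```java
+import org.sleuthkit.datamodel.Host;
+import org.sleuthkit.datamodel.OsAccount;
+import org.sleuthkit.datamodel.OsAccountManager;
+import org.sleuthkit.datamodel.OsAccountRealm;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+
+// Sketch (assumed API): create (or fetch) an OS account for a SID seen on a host.
+static OsAccount addWindowsAccount(SleuthkitCase caseDb, Host host)
+        throws TskCoreException, OsAccountManager.NotUserSIDException {
+    return caseDb.getOsAccountManager().newWindowsOsAccount(
+            "S-1-5-21-1111111111-2222222222-3333333333-1001",   // SID (made up)
+            "jdoe", "EXAMPLEDOMAIN", host,
+            OsAccountRealm.RealmScope.DOMAIN);
+}
+```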
+
+
+## tsk_os_account_realms
+Every OS Account must belong to a realm, which defines the scope of the account.  Realms can be local to a given computer or domain-based. 
+- **realm_name** - Display name of the realm (realm_name or realm_addr must be set)
+- **realm_addr** - Address/ID of the realm (realm_name or realm_addr must be set)
+- **realm_signature** - Used internally to enforce uniqueness; set to realm_addr if that is set, otherwise to realm_name.
+- **scope_host_id** - Optional host that this realm is scoped to.  By default, realms are scoped to a given host. 
+- **scope_confidence** - Confidence of the scope of the realm (as org.sleuthkit.datamodel.OsAccountRealm.ScopeConfidence enum)
+- **db_status** - Status of this realm in the database (as org.sleuthkit.datamodel.OsAccountRealm.RealmDbStatus enum)
+- **merged_into** - For merged realms, set to the id of the realm they were merged into.
+
+## tsk_os_accounts
+Stores operating system accounts
+- **os_account_obj_id** - Id of the OS account
+- **realm_id** - Id of the associated realm (references tsk_os_account_realms)
+- **login_name** - Login name (login name or addr must be present)
+- **addr** - Address/ID of account (login name or addr must be present)
+- **signature** - Used internally to enforce uniqueness
+- **full_name** - Full name
+- **status** - Status of the account (as org.sleuthkit.datamodel.OsAccount.OsAccountStatus enum)
+- **type** - Type of account (as org.sleuthkit.datamodel.OsAccount.OsAccountType enum)
+- **created_date** - Timestamp of account creation
+- **db_status** - Status of this account in the database (active/merged/deleted)
+- **merged_into** - For merged accounts, set to the id of the account they were merged into.
+
+## tsk_os_account_attributes
+Stores additional attributes for an OS account. Similar to blackboard_attributes. Attributes can either be specific to a host or domain-scoped. 
+- **id** - Id of the attribute
+- **os_account_obj_id** - Id of the associated OS account
+- **host_id** - Host Id if the attribute is scoped to the host.  NULL if the attribute is domain-scoped.
+- **source_obj_id** - Optional object id of where the attribute data was derived from (such as a registry hive) (references tsk_objects)
+- **attribute_type_id** - Type of attribute (see org.sleuthkit.datamodel.BlackboardAttribute.Type)
+- **value_type** - The type of the value (see org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE)
+- **value_byte** - A blob of binary data (should be NULL unless the value type is byte)
+- **value_text** - A string of text (should be NULL unless the value type is string)
+- **value_int32** - An integer (should be NULL unless the value type is int)
+- **value_int64** - A long integer / timestamp (should be NULL unless the value type is long)
+- **value_double** - A double (should be NULL unless the value type is double)
+
+## tsk_os_account_instances
+Records that an OS account is associated with a specific data source.  For example, the account logged in, accessed data, etc. 
+- **id** - Id of the OS account instance
+- **os_account_obj_id** - Id of the OS account that was referenced
+- **data_source_obj_id** - Id of the data source
+- **instance_type** - Type of instance (as org.sleuthkit.datamodel.OsAccountInstance.OsAccountInstanceType enum)
+
+
+# Communication Accounts
+Stores data related to communications between two parties. It is highly recommended to use 
+the org.sleuthkit.datamodel.CommunicationsManager API to create/access this type of data
+(see the \ref mod_compage page).
+
+## accounts
+Stores communication accounts (email, phone number, etc.).  Note that this does not include OS accounts. 
+- **account_id** - Id for the account within the scope of the database (i.e. Row Id) (used in the account_relationships table)
+- **account_type_id** - The type of account (must match an account_type_id entry from the account_types table)
+- **account_unique_identifier** - The phone number/email/other identifier associated with the account that is unique within the Account Type 
+
+## account_types
+Types of accounts and service providers (Phone, email, Twitter, Facebook, etc.)
+- **account_type_id** - Id for the type (this is used by the accounts table)
+- **type_name** - A string identifier for the type (unique)
+- **display_name** - A display name for the type (not unique, should be human readable)
+
+## account_relationships
+Stores non-directional relationships between two accounts if they communicated or had references to each other (such as contact book)
+- **relationship_id** -  Id for the relationship
+- **account1_id** - Id of the first participant (from account_id column in accounts table)
+- **account2_id** - Id of the second participant (from account_id column in accounts table)
+- **relationship_source_obj_id** - Id of the artifact this relationship was derived from (artifact_id column from the blackboard_artifacts)
+- **date_time** - Time the communication took place, stored in number of seconds since Jan 1, 1970 UTC (NULL if unknown)
+- **relationship_type** - The type of relationship (as org.sleuthkit.datamodel.Relationship.Type)
+- **data_source_obj_id** - Id of the data source this relationship came from (from obj_id in data_source_info)
+
+# Timeline
+Stores data used to populate various timelines. Two tables are used to reduce data duplication. It is highly recommended to use 
+the org.sleuthkit.datamodel.TimelineManager API to create/access this type of data.  
+
+## tsk_event_types
+Stores the types for events. The super_type_id column is used to arrange the types into a tree.
+- **event_type_id** - Id for the type
+- **display_name** - Display name for the type (unique, should be human readable)
+- **super_type_id** - Parent type for the type (used for building the hierarchy; references the event_type_id in this table)
+
+## tsk_event_descriptions
+Stores descriptions of an event. This table exists to reduce duplicate data that is common to events. For example, a file will have only one row in tsk_event_descriptions, but could have 4+ rows in tsk_events that all refer to the same description. Note that the combination of the full_description, content_obj_id, and artifact_id columns must be unique.
+- **event_description_id** - Id for the event description
+- **full_description** - Full length description of the event (required).  For example, the full file path including file name. 
+- **med_description** - Medium length description of the event (may be null).  For example, a file may have only the first three folder names.
+- **short_description** - Short length description of the event (may be null).  For example, a file may have only its first folder name. 
+- **data_source_obj_id** -  Object id of the data source for the event source (references obj_id column in data_source_info)
+- **content_obj_id** - If the event is from a non-artifact, then this is the object id from that source.  If the event is from an artifact, then this is the object id of the artifact's source. (references obj_id column in tsk_objects)
+- **artifact_id** - If the event is from a non-artifact, this is null. If the event is from an artifact, then this is the id of the artifact (references artifact_id column in blackboard_artifacts) (may be null)
+- **hash_hit** - 1 if the file associated with the event has a hash set hit, 0 otherwise
+- **tagged** - 1 if the direct source of the event has been tagged, 0 otherwise
+
+## tsk_events
+Stores each event. A file, artifact, or other type of content can have several rows in this table, one for each time stamp.
+- **event_id** - Id for the event
+- **event_type_id** - Event type id (references event_type_id column in tsk_event_types)
+- **event_description_id** - Event description id (references event_description_id column in tsk_event_descriptions)
+- **time** -  Time the event occurred, in seconds from the UNIX epoch
+
+# Examiners and Reports
+
+## tsk_examiners
+Encapsulates the concept of an examiner associated with a case.
+- **examiner_id** - Id for the examiner
+- **login_name** - Login name for the examiner (must be unique)
+- **display_name** - Display name for the examiner (may be null)
+
+## reports
+Stores information on generated reports.
+- **obj_id** - Id of the report
+- **path** - Full path to the report (including file name)
+- **crtime** - Time the report was created, in seconds from the UNIX epoch
+- **src_module_name** - Name of the module that created the report
+- **report_name** - Name of the report (can be empty string)
+
+# Tags 
+
+## tag_names
+Defines what tag names the user has created and can therefore be applied.
+- **tag_name_id** - Unique ID for each tag name
+- **display_name** - Display name of tag
+- **description**  - Description  (can be empty string)
+- **color** - Color choice for tag (can be empty string)
+- **knownStatus** - Stores whether a tag is notable/bad (as org.sleuthkit.datamodel.TskData.FileKnown enum)
+- **tag_set_id** - Id of the tag set the tag name belongs to (references tag_set_id in tsk_tag_sets, may be null)
+- **rank** - Used to order the tag names for a given tag set for display purposes
+
+## tsk_tag_sets
+Used to group entries from the tag_names table. An object can have only one tag from a tag set at a time. 
+- **tag_set_id** - Id of the tag set
+- **name** - Name of the tag set (unique, should be human readable)
+
+## content_tags
+One row for each file tagged.  
+- **tag_id** - unique ID
+- **obj_id** - object id of Content that has been tagged
+- **tag_name_id** - Tag name that was used
+- **comment**  - optional comment 
+- **begin_byte_offset** - optional byte offset into file that was tagged
+- **end_byte_offset** - optional byte ending offset into file that was tagged
+- **examiner_id** - Examiner that tagged the content (references examiner_id in tsk_examiners)
+
+## blackboard_artifact_tags
+One row for each artifact that is tagged.
+- **tag_id** - unique ID
+- **artifact_id** - Artifact ID of artifact that was tagged
+- **tag_name_id** - Tag name that was used
+- **comment** - Optional comment
+- **examiner_id** - Examiner that tagged the artifact (references examiner_id in tsk_examiners)
+
+
+# Ingest Module Status
+These tables keep track of which Autopsy ingest modules were run on the data sources.
+
+## ingest_module_types
+Defines the types of ingest modules supported. Must exactly match the names and ordering in the org.sleuthkit.datamodel.IngestModuleInfo.IngestModuleType enum.
+- **type_id** - Id for the ingest module type
+- **type_name** - Internal name for the ingest module type
+
+## ingest_modules
+Defines which modules were installed and run on at least one data source.  One row for each module. 
+- **ingest_module_id** - Id of the ingest module
+- **display_name** - Display name for the ingest module (should be human readable)
+- **unique_name** - Unique name for the ingest module
+- **type_id** - Type of ingest module (references type_id from ingest_module_types)
+- **version** - Version of the ingest module
+
+## ingest_job_status_types
+Defines the status options for ingest jobs. Must match the names and ordering in the org.sleuthkit.datamodel.IngestJobInfo.IngestJobStatusType enum.
+- **type_id** - Id for the ingest job status type
+- **type_name** - Internal name for the ingest job status type
+
+##  ingest_jobs
+One row is created each time ingest (a set of modules in a pipeline) is started on a data source.
+- **ingest_job_id** - Id of the ingest job
+- **obj_id** - Id of the data source ingest is being run on
+- **host_name** - Name of the host that is running the ingest job
+- **start_date_time** - Time the ingest job started (stored in number of milliseconds since Jan 1, 1970 UTC)
+- **end_date_time** - Time the ingest job finished (stored in number of milliseconds since Jan 1, 1970 UTC)
+- **status_id** - Ingest job status (references type_id from ingest_job_status_types)
+- **settings_dir** - Directory of the job's settings (may be an empty string)
+
+##  ingest_job_modules
+Defines the order of the modules in a given pipeline (i.e. ingest_job).
+- **ingest_job_id** - Id for the ingest job (references ingest_job_id in ingest_jobs)
+- **ingest_module_id** - Id of the ingest module (references ingest_module_id in ingest_modules)
+- **pipeline_position** - Order that the ingest module was run
+
+
+*/
diff --git a/bindings/java/doxygen/schema/schema_list.dox b/bindings/java/doxygen/schema/schema_list.dox
new file mode 100644
index 0000000000000000000000000000000000000000..9073e4365ca9d2f56f6ce950464d05cb4d75c086
--- /dev/null
+++ b/bindings/java/doxygen/schema/schema_list.dox
@@ -0,0 +1,16 @@
+/*! \page db_schema_page TSK & Autopsy Database Schemas
+
+This page contains links to the documentation for selected versions of the TSK & Autopsy database schema.
+
+- Current Schema
+ - \subpage db_schema_9_1_page 
+ 
+- Older Schemas
+ - \subpage db_schema_9_0_page 
+ - \subpage db_schema_8_6_page 
+ - <a href="https://wiki.sleuthkit.org/index.php?title=Database_v7.2_Schema">Schema version 7.2</a>
+ - <a href="https://wiki.sleuthkit.org/index.php?title=SQLite_Database_v6_Schema">Schema version 6</a>
+ - <a href="https://wiki.sleuthkit.org/index.php?title=SQLite_Database_v3_Schema">Schema version 3</a>
+ - <a href="https://wiki.sleuthkit.org/index.php?title=SQLite_Database_v2_Schema">Schema version 2</a>
+
+*/
\ No newline at end of file
diff --git a/bindings/java/jni/auto_db_java.cpp b/bindings/java/jni/auto_db_java.cpp
index 7cc3888cf390a52966d1f52d3cfa0e432844ae71..20e915d81c21412a12f83536ad618421b4cfcc1a 100644
--- a/bindings/java/jni/auto_db_java.cpp
+++ b/bindings/java/jni/auto_db_java.cpp
@@ -110,7 +110,7 @@ TskAutoDbJava::initializeJni(JNIEnv * jniEnv, jobject jobj) {
         return TSK_ERR;
     }
 
-    m_addFileMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addFile", "(JJJIIILjava/lang/String;JJIIIIJJJJJIIILjava/lang/String;Ljava/lang/String;JJJ)J");
+    m_addFileMethodID = m_jniEnv->GetMethodID(m_callbackClass, "addFile", "(JJJIIILjava/lang/String;JJIIIIJJJJJIIILjava/lang/String;Ljava/lang/String;JJJLjava/lang/String;)J");
     if (m_addFileMethodID == NULL) {
         return TSK_ERR;
     }
@@ -631,6 +631,17 @@ TskAutoDbJava::addFile(TSK_FS_FILE* fs_file,
     }
     TSK_INUM_T par_meta_addr = fs_file->name->par_addr;
  
+	char *sid_str = NULL;
+	jstring sidj = NULL;	// return null across JNI if sid is not available
+	
+	if (tsk_fs_file_get_owner_sid(fs_file, &sid_str) == 0) {
+		if (createJString(sid_str, sidj) != TSK_OK) {
+			free(sid_str);
+			return TSK_ERR;
+		}
+		free(sid_str);	
+	}
+		
     // Add the file to the database
     jlong ret_val = m_jniEnv->CallLongMethod(m_javaDbObj, m_addFileMethodID,
         parObjId, fsObjId,
@@ -643,7 +654,7 @@ TskAutoDbJava::addFile(TSK_FS_FILE* fs_file,
         (unsigned long long)crtime, (unsigned long long)ctime, (unsigned long long) atime, (unsigned long long) mtime,
         meta_mode, gid, uid, 
         pathj, extj, 
-        (uint64_t)meta_seq, par_meta_addr, par_seqj);
+        (uint64_t)meta_seq, par_meta_addr, par_seqj, sidj);
 
     if (ret_val < 0) {
         free(name);
@@ -690,7 +701,7 @@ TskAutoDbJava::addFile(TSK_FS_FILE* fs_file,
             (unsigned long long)crtime, (unsigned long long)ctime, (unsigned long long) atime, (unsigned long long) mtime,
             meta_mode, gid, uid, // md5TextPtr, known,
             pathj, slackExtj, 
-            (uint64_t)meta_seq, par_meta_addr, par_seqj);
+            (uint64_t)meta_seq, par_meta_addr, par_seqj, sidj);
 
         if (ret_val < 0) {
             free(name);
diff --git a/bindings/java/jni/dataModel_SleuthkitJNI.cpp b/bindings/java/jni/dataModel_SleuthkitJNI.cpp
index f4da3b4b62a2c5b29289ca6962b732af7a9bf1eb..3310bc1860c516aefed403921abe4431909d89cd 100644
--- a/bindings/java/jni/dataModel_SleuthkitJNI.cpp
+++ b/bindings/java/jni/dataModel_SleuthkitJNI.cpp
@@ -805,6 +805,7 @@ JNIEXPORT jobject JNICALL Java_org_sleuthkit_datamodel_SleuthkitJNI_hashDbLookup
  * @param timeZone The time zone for the image.
  * @param addUnallocSpace Pass true to create virtual files for unallocated space. Ignored if addFileSystems is false.
  * @param skipFatFsOrphans Pass true to skip processing of orphan files for FAT file systems. Ignored if addFileSystems is false.
+ * @param hostId Id of the host (already in the database).
  *
  * @return A pointer to the process (TskAutoDbJava object) or NULL on error.
  */
@@ -823,6 +824,7 @@ JNIEXPORT jlong JNICALL
  * @param addFileSystems Pass true to attempt to add file systems within the image to the case database.
  * @param addUnallocSpace Pass true to create virtual files for unallocated space. Ignored if addFileSystems is false.
  * @param skipFatFsOrphans Pass true to skip processing of orphan files for FAT file systems. Ignored if addFileSystems is false.
+ * @param hostId The ID of the host (already in database).
  *
  * @return A pointer to the process (TskAutoDbJava object) or NULL on error.
  */
diff --git a/bindings/java/nbproject/project.xml b/bindings/java/nbproject/project.xml
index 57a2b6befa984bdfa69a539c68c6200745375049..2b34d8ff45723f61d2970a892b13cf86a71e8cfb 100755
--- a/bindings/java/nbproject/project.xml
+++ b/bindings/java/nbproject/project.xml
@@ -114,14 +114,14 @@
         <java-data xmlns="http://www.netbeans.org/ns/freeform-project-java/4">
             <compilation-unit>
                 <package-root>src</package-root>
-                <classpath mode="compile">lib;lib/diffutils-1.2.1.jar;lib/junit-4.8.2.jar;lib/postgresql-42.2.18.jar;lib/c3p0-0.9.5.jar;lib/mchange-commons-java-0.2.9.jar;lib/c3p0-0.9.5-sources.jar;lib/c3p0-0.9.5-javadoc.jar;lib/joda-time-2.4.jar;lib/commons-lang3-3.0.jar;lib/guava-19.0.jar;lib/SparseBitSet-1.1.jar;lib/gson-2.8.5.jar;lib/commons-validator-1.6.jar</classpath>
+                <classpath mode="compile">lib;lib/diffutils-1.2.1.jar;lib/junit-4.12.jar;lib/postgresql-42.2.18.jar;lib/c3p0-0.9.5.jar;lib/mchange-commons-java-0.2.9.jar;lib/c3p0-0.9.5-sources.jar;lib/c3p0-0.9.5-javadoc.jar;lib/joda-time-2.4.jar;lib/commons-lang3-3.0.jar;lib/guava-19.0.jar;lib/SparseBitSet-1.1.jar;lib/gson-2.8.5.jar;lib/commons-validator-1.6.jar</classpath>
                 <built-to>build</built-to>
                 <source-level>1.8</source-level>
             </compilation-unit>
             <compilation-unit>
                 <package-root>test</package-root>
                 <unit-tests/>
-                <classpath mode="compile">build;lib/diffutils-1.2.1.jar;lib/diffutils-1.2.1-javadoc.jar;lib/diffutils-1.2.1-sources.jar;lib/junit-4.8.2.jar</classpath>
+                <classpath mode="compile">build;lib/diffutils-1.2.1.jar;lib/diffutils-1.2.1-javadoc.jar;lib/diffutils-1.2.1-sources.jar;lib/junit-4.12.jar</classpath>
                 <built-to>build</built-to>
                 <built-to>test</built-to>
                 <source-level>1.8</source-level>
diff --git a/bindings/java/src/org/sleuthkit/datamodel/AbstractAttribute.java b/bindings/java/src/org/sleuthkit/datamodel/AbstractAttribute.java
new file mode 100644
index 0000000000000000000000000000000000000000..23aca765888b608a6c996118a64b772ff0150c78
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/AbstractAttribute.java
@@ -0,0 +1,370 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *  http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * An abstract base class for attributes as name-value pairs with type safety.
+ * The attribute type field indicates which one of the value fields is valid.
+ */
+public abstract class AbstractAttribute {
+
+	private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();
+
+	private final BlackboardAttribute.Type attributeType;
+
+	private final int valueInt;
+	private final long valueLong;
+	private final double valueDouble;
+	private final String valueString;
+	private final byte[] valueBytes;
+
+	private SleuthkitCase sleuthkitCase;
+
+	/**
+	 * Constructs an attribute with an integer value.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueInt      The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER.
+	 */
+	public AbstractAttribute(BlackboardAttribute.Type attributeType, int valueInt) {
+		if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER) {
+			throw new IllegalArgumentException("Type mismatched with value type");
+		}
+		this.attributeType = attributeType;
+		this.valueInt = valueInt;
+		this.valueLong = 0;
+		this.valueDouble = 0;
+		this.valueString = "";
+		this.valueBytes = new byte[0];
+	}
+
+	/**
+	 * Constructs an attribute with a long/datetime value.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueLong     The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  standard attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG
+	 *                                  or
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME.
+	 */
+	public AbstractAttribute(BlackboardAttribute.Type attributeType, long valueLong) {
+		if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG
+				&& attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME) {
+			throw new IllegalArgumentException("Type mismatched with value type");
+		}
+		this.attributeType = attributeType;
+		this.valueInt = 0;
+		this.valueLong = valueLong;
+		this.valueDouble = 0;
+		this.valueString = "";
+		this.valueBytes = new byte[0];
+	}
+
+	/**
+	 * Constructs an attribute with a double value.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueDouble   The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE.
+	 */
+	public AbstractAttribute(BlackboardAttribute.Type attributeType, double valueDouble) {
+		if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) {
+			throw new IllegalArgumentException("Type mismatched with value type");
+		}
+		this.attributeType = attributeType;
+		this.valueInt = 0;
+		this.valueLong = 0;
+		this.valueDouble = valueDouble;
+		this.valueString = "";
+		this.valueBytes = new byte[0];
+	}
+
+	/**
+	 * Constructs an attribute with a string value.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueString   The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING.
+	 */
+	public AbstractAttribute(BlackboardAttribute.Type attributeType, String valueString) {
+		if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
+				&& attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) {
+			throw new IllegalArgumentException("Type mismatched with value type");
+		}
+		this.attributeType = attributeType;
+		this.valueInt = 0;
+		this.valueLong = 0;
+		this.valueDouble = 0;
+		if (valueString == null) {
+			this.valueString = "";
+		} else {
+			this.valueString = replaceNulls(valueString).trim();
+		}
+		this.valueBytes = new byte[0];
+	}
+
+	/**
+	 * Constructs an attribute with a byte array value.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueBytes    The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE.
+	 */
+	public AbstractAttribute(BlackboardAttribute.Type attributeType, byte[] valueBytes) {
+		if (attributeType.getValueType() != BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) {
+			throw new IllegalArgumentException("Type mismatched with value type");
+		}
+		this.attributeType = attributeType;
+		this.valueInt = 0;
+		this.valueLong = 0;
+		this.valueDouble = 0;
+		this.valueString = "";
+		if (valueBytes == null) {
+			this.valueBytes = new byte[0];
+		} else {
+			this.valueBytes = valueBytes;
+		}
+	}
+
+	/**
+	 * Constructs an attribute.
+	 *
+	 * @param attributeType   The attribute type.
+	 * @param valueInt        The value from the value_int32 column.
+	 * @param valueLong       The value from the value_int64 column.
+	 * @param valueDouble     The value from the value_double column.
+	 * @param valueString     The value from the value_text column.
+	 * @param valueBytes      The value from the value_byte column.
+	 * @param sleuthkitCase   A reference to the SleuthkitCase object
+	 *                        representing the case database.
+	 */
+	AbstractAttribute(BlackboardAttribute.Type attributeType,
+			int valueInt, long valueLong, double valueDouble, String valueString, byte[] valueBytes,
+			SleuthkitCase sleuthkitCase) {
+
+		this.attributeType = attributeType;
+		this.valueInt = valueInt;
+		this.valueLong = valueLong;
+		this.valueDouble = valueDouble;
+		if (valueString == null) {
+			this.valueString = "";
+		} else {
+			this.valueString = replaceNulls(valueString).trim();
+		}
+		if (valueBytes == null) {
+			this.valueBytes = new byte[0];
+		} else {
+			this.valueBytes = valueBytes;
+		}
+		this.sleuthkitCase = sleuthkitCase;
+	}
+
+	/**
+	 * Gets the attribute value as a string, formatted as required.
+	 *
+	 * @return The value as a string.
+	 */
+	public String getDisplayString() {
+		switch (attributeType.getValueType()) {
+			case STRING:
+				return getValueString();
+			case INTEGER:
+				if (attributeType.getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_READ_STATUS.getTypeID()) {
+					if (getValueInt() == 0) {
+						return "Unread";
+					} else {
+						return "Read";
+					}
+				}
+				return Integer.toString(getValueInt());
+			case LONG:
+				return Long.toString(getValueLong());
+			case DOUBLE:
+				return Double.toString(getValueDouble());
+			case BYTE:
+				return bytesToHexString(getValueBytes());
+			case DATETIME:
+				// once we have TSK timezone, that should be used here.
+				return TimeUtilities.epochToTime(getValueLong());
+			case JSON: {
+				return getValueString();
+			}
+		}
+		return "";
+	}
+
+	/**
+	 * Gets the type of this attribute.
+	 *
+	 * @return The attribute type.
+	 */
+	public BlackboardAttribute.Type getAttributeType() {
+		return this.attributeType;
+	}
+
+	/**
+	 * Gets the value type of this attribute.
+	 *
+	 * @return The value type
+	 */
+	public BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE getValueType() {
+		return attributeType.getValueType();
+	}
+
+	/**
+	 * Gets the value of this attribute. The value is only valid if the
+	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER.
+	 *
+	 * @return The attribute value.
+	 */
+	public int getValueInt() {
+		return valueInt;
+	}
+
+	/**
+	 * Gets the value of this attribute. The value is only valid if the
+	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG.
+	 *
+	 * @return The attribute value.
+	 */
+	public long getValueLong() {
+		return valueLong;
+	}
+
+	/**
+	 * Gets the value of this attribute. The value is only valid if the
+	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE.
+	 *
+	 * @return The attribute value.
+	 */
+	public double getValueDouble() {
+		return valueDouble;
+	}
+
+	/**
+	 * Gets the value of this attribute. The value is only valid if the
+	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING or
+	 * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON.
+	 *
+	 * @return The attribute value.
+	 */
+	public String getValueString() {
+		return valueString;
+	}
+
+	/**
+	 * Gets the value of this attribute. The value is only valid if the
+	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE.
+	 *
+	 * @return The attribute value.
+	 */
+	public byte[] getValueBytes() {
+		return Arrays.copyOf(valueBytes, valueBytes.length);
+	}
+
+	/**
+	 * Gets the reference to the SleuthkitCase object that represents the case
+	 * database where this attribute is stored.
+	 *
+	 * @return A reference to a SleuthkitCase object.
+	 */
+	SleuthkitCase getCaseDatabase() {
+		return this.sleuthkitCase;
+	}
+
+	/**
+	 * Sets the reference to the SleuthkitCase object that represents the case
+	 * database where this attribute is stored.
+	 *
+	 * @param sleuthkitCase A reference to a SleuthkitCase object.
+	 */
+	void setCaseDatabase(SleuthkitCase sleuthkitCase) {
+		this.sleuthkitCase = sleuthkitCase;
+	}
+
+	/**
+	 * Converts a byte array to a hexadecimal string.
+	 *
+	 * @param bytes The byte array.
+	 *
+	 * @return The hexadecimal string.
+	 */
+	static String bytesToHexString(byte[] bytes) {
+		// from http://stackoverflow.com/questions/9655181/convert-from-byte-array-to-hex-string-in-java
+		char[] hexChars = new char[bytes.length * 2];
+		for (int j = 0; j < bytes.length; j++) {
+			int v = bytes[j] & 0xFF;
+			hexChars[j * 2] = HEX_ARRAY[v >>> 4];
+			hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F];
+		}
+		return new String(hexChars);
+	}
+
+	/**
+	 * Replaces all NUL characters in the string with the SUB character.
+	 *
+	 * @param text The input string.
+	 *
+	 * @return The output string.
+	 */
+	static String replaceNulls(String text) {
+		return text.replace((char) 0x00, (char) 0x1A);
+	}
+
+	/**
+	 * Checks whether all of the value fields of this attribute are equal to
+	 * those of another attribute.
+	 *
+	 * @param that Another attribute.
+	 *
+	 * @return True or false.
+	 */
+	boolean areValuesEqual(Object that) {
+		if (that instanceof AbstractAttribute) {
+			AbstractAttribute other = (AbstractAttribute) that;
+			Object[] thisObject = new Object[]{this.getAttributeType(), this.getValueInt(), this.getValueLong(), this.getValueDouble(),
+				this.getValueString(), this.getValueBytes()};
+			Object[] otherObject = new Object[]{other.getAttributeType(), other.getValueInt(), other.getValueLong(), other.getValueDouble(),
+				other.getValueString(), other.getValueBytes()};
+			return Objects.deepEquals(thisObject, otherObject);
+		} else {
+			return false;
+		}
+	}
+}
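
A minimal usage sketch (not part of the patch) of the value-type checking and display formatting that AbstractAttribute provides, shown through the concrete Attribute subclass added later in this change. The TSK_COMMENT attribute type (value type STRING) and the printed messages are illustrative assumptions; surrounding setup and imports are omitted.

	BlackboardAttribute.Type commentType =
			new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT);
	Attribute comment = new Attribute(commentType, "Reviewed by analyst");
	System.out.println(comment.getDisplayString());   // prints the string value

	try {
		// TSK_COMMENT carries a STRING value, so an integer value is rejected.
		new Attribute(commentType, 42);
	} catch (IllegalArgumentException ex) {
		System.out.println("Value type does not match the attribute type");
	}
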
diff --git a/bindings/java/src/org/sleuthkit/datamodel/AbstractContent.java b/bindings/java/src/org/sleuthkit/datamodel/AbstractContent.java
index 50afea2be2b2dc50a86ce6f58d27f7789ab3662a..f16830f626477bd878c5835d658e4985b326f276 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/AbstractContent.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/AbstractContent.java
@@ -19,14 +19,18 @@
 package org.sleuthkit.datamodel;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
+import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
 import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
 import org.sleuthkit.datamodel.SleuthkitCase.ObjectInfo;
 
 /**
@@ -35,6 +39,7 @@
  */
 public abstract class AbstractContent implements Content {
 
+	private final static BlackboardArtifact.Type GEN_INFO_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_GEN_INFO);
 	public final static long UNKNOWN_ID = -1;
 	private final SleuthkitCase db;
 	private final long objId;
@@ -82,7 +87,7 @@ public String getUniquePath() throws TskCoreException {
 			if (myParent != null) {
 				tempUniquePath = myParent.getUniquePath() + tempUniquePath;
 			}
-			
+
 			// Don't update uniquePath until it is complete.
 			uniquePath = tempUniquePath;
 		}
@@ -200,73 +205,74 @@ public Content getDataSource() throws TskCoreException {
 
 		return myParent.getDataSource();
 	}
-	
+
 	/**
 	 * Return whether this content has a Pool above it
-	 * 
+	 *
 	 * @return true if there is a Pool object in the parent structure
-	 * 
-	 * @throws TskCoreException 
+	 *
+	 * @throws TskCoreException
 	 */
 	boolean isPoolContent() throws TskCoreException {
 		return getPool() != null;
 	}
-	
+
 	/**
-	 * Get the pool volume 
-	 * 
-	 * @return the volume above this content and below a Pool object or null if not found
-	 * 
-	 * @throws TskCoreException 
+	 * Get the pool volume
+	 *
+	 * @return the volume above this content and below a Pool object or null if
+	 *         not found
+	 *
+	 * @throws TskCoreException
 	 */
 	Volume getPoolVolume() throws TskCoreException {
 		Content myParent = getParent();
 		if (myParent == null) {
 			return null;
 		}
-		
-		if (! (myParent instanceof AbstractContent)) {
+
+		if (!(myParent instanceof AbstractContent)) {
 			return null;
 		}
-		
+
 		if (myParent instanceof Volume) {
 			// This is potentially it, but need to check that this is a volume under a pool
 			if (((Volume) myParent).isPoolContent()) {
-				return (Volume)myParent;
+				return (Volume) myParent;
 			} else {
 				// There are no pools in the hierarchy, so we're done
 				return null;
 			}
 		}
-		
+
 		// Try one level higher
-		return ((AbstractContent)myParent).getPoolVolume();
-	}	
-	
+		return ((AbstractContent) myParent).getPoolVolume();
+	}
+
 	/**
-	 * Get the pool  
-	 * 
+	 * Get the pool
+	 *
 	 * @return the pool above this content or null if not found
-	 * 
-	 * @throws TskCoreException 
+	 *
+	 * @throws TskCoreException
 	 */
 	Pool getPool() throws TskCoreException {
 		Content myParent = getParent();
 		if (myParent == null) {
 			return null;
 		}
-		
-		if (! (myParent instanceof AbstractContent)) {
+
+		if (!(myParent instanceof AbstractContent)) {
 			return null;
 		}
-		
+
 		if (myParent instanceof Pool) {
-			return (Pool)myParent;
+			return (Pool) myParent;
 		}
-		
+
 		// Try one level higher
-		return ((AbstractContent)myParent).getPool();
-	}		
+		return ((AbstractContent) myParent).getPool();
+	}
 
 	/**
 	 * Gets handle of SleuthkitCase to which this content belongs
@@ -314,15 +320,90 @@ public int hashCode() {
 		return hash;
 	}
 
+	@Deprecated
 	@Override
 	public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
 		// don't let them make more than 1 GEN_INFO
 		if (artifactTypeID == ARTIFACT_TYPE.TSK_GEN_INFO.getTypeID()) {
 			return getGenInfoArtifact(true);
 		}
-		return db.newBlackboardArtifact(artifactTypeID, objId);
+		BlackboardArtifact.Type artifactType = db.getArtifactType(artifactTypeID);
+		switch (artifactType.getCategory()) {
+			case DATA_ARTIFACT:
+				return this.newDataArtifact(artifactType, Collections.emptyList());
+			case ANALYSIS_RESULT: {
+				AnalysisResultAdded addedResult = this.newAnalysisResult(artifactType, Score.SCORE_UNKNOWN, null, null, null, Collections.emptyList());
+				return addedResult.getAnalysisResult();
+			}
+			default:
+				throw new TskCoreException(String.format("Unknown category: %s for artifact type id: %d",
+						artifactType.getCategory().getName(), artifactTypeID));
+		}
 	}
 
+	@Override
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
+
+		long dataSourceObjectId = this.getDataSource().getId();
+		CaseDbTransaction trans = db.beginTransaction();
+		try {
+			AnalysisResultAdded resultAdded = db.getBlackboard().newAnalysisResult(artifactType, objId, dataSourceObjectId, score, conclusion, configuration, justification, attributesList, trans);
+
+			trans.commit();
+			return resultAdded;
+		} catch (BlackboardException ex) {
+			trans.rollback();
+			throw new TskCoreException(String.format("Error adding analysis result to content with objId = %d.", objId), ex);
+		}
+	}
+
+	@Override
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList, long dataSourceId) throws TskCoreException {
+
+		long dataSourceObjectId = dataSourceId;
+		CaseDbTransaction trans = db.beginTransaction();
+		try {
+			AnalysisResultAdded resultAdded = db.getBlackboard().newAnalysisResult(artifactType, objId, dataSourceObjectId, score, conclusion, configuration, justification, attributesList, trans);
+
+			trans.commit();
+			return resultAdded;
+		} catch (BlackboardException ex) {
+			trans.rollback();
+			throw new TskCoreException(String.format("Error adding analysis result to content with objId = %d.", objId), ex);
+		}
+	}
+
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
+		DataArtifact artifact = db.getBlackboard().newDataArtifact(artifactType, objId, this.getDataSource().getId(), attributesList, osAccountId);
+
+		if (osAccountId != null) {
+			try (CaseDbConnection connection = db.getConnection()) {
+				db.getOsAccountManager().newOsAccountInstance(osAccountId, getDataSource().getId(), OsAccountInstance.OsAccountInstanceType.LAUNCHED, connection);
+			}
+		}
+		return artifact;
+	}
+
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId, long dataSourceId) throws TskCoreException {
+		DataArtifact artifact = db.getBlackboard().newDataArtifact(artifactType, objId, dataSourceId, attributesList, osAccountId);
+
+		if (osAccountId != null) {
+			try (CaseDbConnection connection = db.getConnection()) {
+				db.getOsAccountManager().newOsAccountInstance(osAccountId, dataSourceId, OsAccountInstance.OsAccountInstanceType.LAUNCHED, connection);
+			}
+		}
+		return artifact;
+	}
+
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
+		return newDataArtifact(artifactType, attributesList, null);
+	}
+
+	@Deprecated
+	@SuppressWarnings("deprecation")
 	@Override
 	public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
 		return newArtifact(type.getTypeID());
@@ -372,7 +453,7 @@ public BlackboardArtifact getGenInfoArtifact(boolean create) throws TskCoreExcep
 		BlackboardArtifact retArt;
 		if (arts.isEmpty()) {
 			if (create) {
-				retArt = db.newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO, objId);
+				retArt = this.newDataArtifact(GEN_INFO_TYPE, Collections.emptyList());
 			} else {
 				return null;
 			}
@@ -408,6 +489,26 @@ public ArrayList<BlackboardArtifact> getAllArtifacts() throws TskCoreException {
 		return db.getMatchingArtifacts("WHERE obj_id = " + objId); //NON-NLS
 	}
 
+	@Override
+	public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException {
+		return db.getBlackboard().getAnalysisResults(objId);
+	}
+
+	@Override
+	public List<DataArtifact> getAllDataArtifacts() throws TskCoreException {
+		return db.getBlackboard().getDataArtifactsBySource(objId);
+	}
+
+	@Override
+	public Score getAggregateScore() throws TskCoreException {
+		return db.getScoringManager().getAggregateScore(objId);
+	}
+
+	@Override
+	public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException {
+		return db.getBlackboard().getAnalysisResults(objId, artifactType.getTypeID()); //NON-NLS
+	}
+
 	@Override
 	public long getArtifactsCount(String artifactTypeName) throws TskCoreException {
 		return db.getBlackboardArtifactsCount(artifactTypeName, objId);
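
A minimal usage sketch (not part of the patch) of the new category-aware creation methods on Content. The variable content is an assumed Content instance, and it is assumed that TSK_INTERESTING_FILE_HIT is categorized as an analysis result type and TSK_WEB_HISTORY as a data artifact type; surrounding setup and imports are omitted.

	BlackboardArtifact.Type hitType =
			new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT);
	AnalysisResultAdded added = content.newAnalysisResult(
			hitType,
			Score.SCORE_UNKNOWN,                     // score assigned by the analysis
			"Matched keyword list",                  // conclusion
			"Keyword set A",                         // configuration
			"File name contained a watched term",    // justification
			Collections.emptyList());                // no additional attributes
	Score aggregate = added.getAggregateScore();     // updated aggregate score of the content

	BlackboardArtifact.Type webHistoryType =
			new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY);
	DataArtifact historyArtifact = content.newDataArtifact(webHistoryType, Collections.emptyList());
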
diff --git a/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java b/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
index 4e2f90a85e3ec03c998ec518bb02201fd04d739e..3ff266e1ebb459efd2a2d523749e46d89e925a30 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/AbstractFile.java
@@ -1,7 +1,7 @@
 /*
  * SleuthKit Java Bindings
  *
- * Copyright 2011-2020 Basis Technology Corp.
+ * Copyright 2011-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -25,13 +25,18 @@
 import java.sql.Statement;
 import java.text.MessageFormat;
 import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
 import java.util.ResourceBundle;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.TimeZone;
 import java.util.logging.Level;
 import java.util.logging.Logger;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
 import org.sleuthkit.datamodel.TskData.FileKnown;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
@@ -47,8 +52,8 @@ public abstract class AbstractFile extends AbstractContent {
 	protected final TskData.TSK_DB_FILES_TYPE_ENUM fileType;
 	protected final TSK_FS_NAME_TYPE_ENUM dirType;
 	protected final TSK_FS_META_TYPE_ENUM metaType;
-	protected final TSK_FS_NAME_FLAG_ENUM dirFlag;
-	protected final Set<TSK_FS_META_FLAG_ENUM> metaFlags;
+	protected TSK_FS_NAME_FLAG_ENUM dirFlag;
+	protected Set<TSK_FS_META_FLAG_ENUM> metaFlags;
 	protected long size;
 	protected final long metaAddr, ctime, crtime, atime, mtime;
 	protected final int metaSeq;
@@ -90,6 +95,15 @@ public abstract class AbstractFile extends AbstractContent {
 	private static final ResourceBundle BUNDLE = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
 	private long dataSourceObjectId;
 	private final String extension;
+	private final List<Attribute> fileAttributesCache = new ArrayList<Attribute>();
+	private boolean loadedAttributesCacheFromDb = false;
+
+	private final String ownerUid;	// string owner uid, for example a Windows SID.
+	// different from the numeric uid which is more commonly found 
+	// on Unix based file systems.
+	private final Long osAccountObjId; // obj id of the owner's OS account, may be null
+	
+	private volatile String uniquePath;
 
 	/**
 	 * Initializes common fields used by AbstactFile implementations (objects in
@@ -125,8 +139,11 @@ public abstract class AbstractFile extends AbstractContent {
 	 *                           unknown (default)
 	 * @param parentPath
 	 * @param mimeType           The MIME type of the file, can be null.
-	 * @param extension		        The extension part of the file name (not
+	 * @param extension          The extension part of the file name (not
 	 *                           including the '.'), can be null.
+	 * @param ownerUid           Owner uid/SID, can be null if not available.
+	 * @param osAccountObjectId	 Object Id of the owner OsAccount, may be null.
+	 *
 	 */
 	AbstractFile(SleuthkitCase db,
 			long objId,
@@ -144,7 +161,10 @@ public abstract class AbstractFile extends AbstractContent {
 			String md5Hash, String sha256Hash, FileKnown knownState,
 			String parentPath,
 			String mimeType,
-			String extension) {
+			String extension,
+			String ownerUid,
+			Long osAccountObjectId,
+			List<Attribute> fileAttributes) {
 		super(db, objId, name);
 		this.dataSourceObjectId = dataSourceObjectId;
 		this.attrType = attrType;
@@ -176,6 +196,12 @@ public abstract class AbstractFile extends AbstractContent {
 		this.mimeType = mimeType;
 		this.extension = extension == null ? "" : extension;
 		this.encodingType = TskData.EncodingType.NONE;
+		this.ownerUid = ownerUid;
+		this.osAccountObjId = osAccountObjectId;
+		if (Objects.nonNull(fileAttributes) && !fileAttributes.isEmpty()) {
+			this.fileAttributesCache.addAll(fileAttributes);
+			loadedAttributesCacheFromDb = true;
+		}
 	}
 
 	/**
@@ -501,6 +527,76 @@ public String getSha256Hash() {
 		return this.sha256Hash;
 	}
 
+	/**
+	 * Gets the attributes of this file, loading them from the case database on
+	 * first access and caching them thereafter.
+	 *
+	 * @return An unmodifiable list of the file's attributes.
+	 *
+	 * @throws TskCoreException If there is an error querying the case database.
+	 */
+	public List<Attribute> getAttributes() throws TskCoreException {
+		synchronized (this) {
+			if (!loadedAttributesCacheFromDb) {
+				ArrayList<Attribute> attributes = getSleuthkitCase().getFileAttributes(this);
+				fileAttributesCache.clear();
+				fileAttributesCache.addAll(attributes);
+				loadedAttributesCacheFromDb = true;
+			}
+			return Collections.unmodifiableList(fileAttributesCache);
+		}
+	}
+
+	/**
+	 * Adds a collection of attributes to this file in a single operation within
+	 * a transaction supplied by the caller.
+	 *
+	 * @param attributes        The collection of attributes.
+	 * @param caseDbTransaction The transaction in the scope of which the
+	 *                          operation is to be performed, managed by the
+	 *                          caller. If null is passed in, a local
+	 *                          transaction will be created and used.
+	 *
+	 * @throws TskCoreException If an error occurs and the attributes were not
+	 *                          added to the file.
+	 */
+	public void addAttributes(Collection<Attribute> attributes, final SleuthkitCase.CaseDbTransaction caseDbTransaction) throws TskCoreException {
+
+		if (Objects.isNull(attributes) || attributes.isEmpty()) {
+			throw new TskCoreException("Illegal Argument passed to addAttributes: null or empty attributes passed to addAttributes");
+		}
+		boolean isLocalTransaction = Objects.isNull(caseDbTransaction);
+		SleuthkitCase.CaseDbTransaction localTransaction = isLocalTransaction ? getSleuthkitCase().beginTransaction() : null;
+		SleuthkitCase.CaseDbConnection connection = isLocalTransaction ? localTransaction.getConnection() : caseDbTransaction.getConnection();
+
+		try {
+			for (final Attribute attribute : attributes) {
+				attribute.setAttributeParentId(getId());
+				attribute.setCaseDatabase(getSleuthkitCase());
+				getSleuthkitCase().addFileAttribute(attribute, connection);
+			}
+
+			if (isLocalTransaction) {
+				localTransaction.commit();
+				localTransaction = null;
+			}
+			// append the new attributes if cache is already loaded.
+			synchronized (this) {
+				if (loadedAttributesCacheFromDb) {
+					fileAttributesCache.addAll(attributes);
+				}
+			}
+		} catch (SQLException ex) {
+			if (isLocalTransaction && null != localTransaction) {
+				try {
+					localTransaction.rollback();
+				} catch (TskCoreException ex2) {
+					LOGGER.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
+				}
+			}
+			throw new TskCoreException("Error adding file attributes", ex);
+		}
+	}
+
 	/**
 	 * Sets the known state for this file. Passed in value will be ignored if it
 	 * is "less" than the current state. A NOTABLE file cannot be downgraded to
@@ -638,7 +734,7 @@ public long convertToImgOffset(long fileOffset) throws TskCoreException {
 
 	/**
 	 * Converts a file offset and length into a series of TskFileRange objects
-	 * whose offsets are relative to the image.  This method will only work on
+	 * whose offsets are relative to the image. This method will only work on
 	 * files with layout ranges.
 	 *
 	 * @param fileOffset The byte offset in this file to map.
@@ -686,7 +782,7 @@ public List<TskFileRange> convertToImgRanges(long fileOffset, long length) throw
 
 				// how much this current range exceeds the length requested (or 0 if within the length requested)
 				long rangeOvershoot = Math.max(0, curRangeEnd - requestedEnd);
-				
+
 				long newRangeLen = curRangeLen - rangeOffset - rangeOvershoot;
 				toRet.add(new TskFileRange(newRangeStart, newRangeLen, toRet.size()));
 			}
@@ -839,6 +935,15 @@ public String getDirFlagAsString() {
 		return dirFlag.toString();
 	}
 
+	/**
+	 * Set the directory name flag.
+	 *
+	 * @param flag Flag to set to.
+	 */
+	void setDirFlag(TSK_FS_NAME_FLAG_ENUM flag) {
+		dirFlag = flag;
+	}
+
 	/**
 	 * @return a string representation of the meta flags
 	 */
@@ -861,6 +966,33 @@ public boolean isMetaFlagSet(TSK_FS_META_FLAG_ENUM metaFlag) {
 		return metaFlags.contains(metaFlag);
 	}
 
+	/**
+	 * Set the specified meta flag.
+	 *
+	 * @param metaFlag Meta flag to set
+	 */
+	void setMetaFlag(TSK_FS_META_FLAG_ENUM metaFlag) {
+		metaFlags.add(metaFlag);
+	}
+
+	/**
+	 * Remove the specified meta flag.
+	 *
+	 * @param metaFlag Meta flag to remove.
+	 */
+	void removeMetaFlag(TSK_FS_META_FLAG_ENUM metaFlag) {
+		metaFlags.remove(metaFlag);
+	}
+
+	/**
+	 * Get the meta flags as a bit field.
+	 *
+	 * @return Short (bit field) representation of the meta flags.
+	 */
+	short getMetaFlagsAsInt() {
+		return TSK_FS_META_FLAG_ENUM.toInt(metaFlags);
+	}
+
 	@Override
 	public final int read(byte[] buf, long offset, long len) throws TskCoreException {
 		//template method
@@ -915,14 +1047,6 @@ protected final int readLocal(byte[] buf, long offset, long len) throws TskCoreE
 		}
 
 		loadLocalFile();
-		if (!localFile.exists()) {
-			throw new TskCoreException(
-					MessageFormat.format(BUNDLE.getString("AbstractFile.readLocal.exception.msg2.text"), localAbsPath));
-		}
-		if (!localFile.canRead()) {
-			throw new TskCoreException(
-					MessageFormat.format(BUNDLE.getString("AbstractFile.readLocal.exception.msg3.text"), localAbsPath));
-		}
 
 		int bytesRead = 0;
 
@@ -1181,67 +1305,140 @@ public MimeMatchEnum isMimeType(SortedSet<String> mimeTypes) {
 	}
 
 	/**
-	 * Saves the editable file properties of this file to the case database,
-	 * e.g., the MIME type, MD5 hash, and known state.
+	 * Saves the editable properties of this file to the case database, e.g.,
+	 * the MIME type, MD5 hash, and known state.
 	 *
 	 * @throws TskCoreException if there is an error saving the editable file
 	 *                          properties to the case database.
 	 */
 	public void save() throws TskCoreException {
+		CaseDbTransaction transaction = null;
+		try {
+			transaction = getSleuthkitCase().beginTransaction();
+			save(transaction);
+			transaction.commit();
+		} catch (TskCoreException ex) {
+			if (transaction != null) {
+				transaction.rollback();
+			}
+			throw ex;
+		}
+	}
 
-		// No fields have been updated
+	/**
+	 * Saves the editable properties of this file to the case database, e.g.,
+	 * the MIME type, MD5 hash, and known state, in the context of a given case
+	 * database transaction.
+	 *
+	 * @param transaction The transaction.
+	 *
+	 * @throws TskCoreException if there is an error saving the editable file
+	 *                          properties to the case database.
+	 */
+	public void save(CaseDbTransaction transaction) throws TskCoreException {
 		if (!(md5HashDirty || sha256HashDirty || mimeTypeDirty || knownStateDirty)) {
 			return;
 		}
 
-		String queryStr = "";
+		String updateSql = "";
 		if (mimeTypeDirty) {
-			queryStr = "mime_type = '" + this.getMIMEType() + "'";
+			updateSql = "mime_type = '" + this.getMIMEType() + "'";
 		}
 		if (md5HashDirty) {
-			if (!queryStr.isEmpty()) {
-				queryStr += ", ";
+			if (!updateSql.isEmpty()) {
+				updateSql += ", ";
 			}
-			queryStr += "md5 = '" + this.getMd5Hash() + "'";
+			updateSql += "md5 = '" + this.getMd5Hash() + "'";
 		}
 		if (sha256HashDirty) {
-			if (!queryStr.isEmpty()) {
-				queryStr += ", ";
+			if (!updateSql.isEmpty()) {
+				updateSql += ", ";
 			}
-			queryStr += "sha256 = '" + this.getSha256Hash() + "'";
+			updateSql += "sha256 = '" + this.getSha256Hash() + "'";
 		}
 		if (knownStateDirty) {
-			if (!queryStr.isEmpty()) {
-				queryStr += ", ";
+			if (!updateSql.isEmpty()) {
+				updateSql += ", ";
 			}
-			queryStr += "known = '" + this.getKnown().getFileKnownValue() + "'";
+			updateSql += "known = '" + this.getKnown().getFileKnownValue() + "'";
 		}
+		updateSql = "UPDATE tsk_files SET " + updateSql + " WHERE obj_id = " + this.getId();
 
-		queryStr = "UPDATE tsk_files SET " + queryStr + " WHERE obj_id = " + this.getId();
-
-		getSleuthkitCase().acquireSingleUserCaseWriteLock();
-		try (SleuthkitCase.CaseDbConnection connection = getSleuthkitCase().getConnection();
-				Statement statement = connection.createStatement();) {
-
-			connection.executeUpdate(statement, queryStr);
+		SleuthkitCase.CaseDbConnection connection = transaction.getConnection();
+		try (Statement statement = connection.createStatement()) {
+			connection.executeUpdate(statement, updateSql);
 			md5HashDirty = false;
 			sha256HashDirty = false;
 			mimeTypeDirty = false;
 			knownStateDirty = false;
 		} catch (SQLException ex) {
-			throw new TskCoreException(String.format("Error saving properties for file (obj_id = %s)", this.getId()), ex);
-		} finally {
-			getSleuthkitCase().releaseSingleUserCaseWriteLock();
+			throw new TskCoreException(String.format("Error updating properties of file %s (obj_id = %s)", getName(), getId()), ex);
 		}
 	}
 
+	/**
+	 * Get the owner uid.
+	 *
+	 * Note this is a string uid, typically a Windows SID. This is different
+	 * from the numeric uid commonly found on Unix based file systems.
+	 *
+	 * @return Optional with owner uid.
+	 */
+	public Optional<String> getOwnerUid() {
+		return Optional.ofNullable(ownerUid);
+	}
+
+	/**
+	 * Get the Object Id of the owner account.
+	 *
+	 * @return Optional with Object Id of the OsAccount, or Optional.empty.
+	 */
+	public Optional<Long> getOsAccountObjectId() {
+		return Optional.ofNullable(osAccountObjId);
+	}
+	
 	@Override
-	public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
-		// don't let them make more than 1 GEN_INFO
-		if (artifactTypeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_GEN_INFO.getTypeID()) {
-			return getGenInfoArtifact(true);
+	public String getUniquePath() throws TskCoreException {
+
+		if (uniquePath == null) {
+			Content dataSource = getDataSource();
+			if (dataSource instanceof LocalFilesDataSource) {
+				if(dataSource != this) {
+					uniquePath = dataSource.getUniquePath() + parentPath + getName();
+				} else {
+					uniquePath =  "/" + getName();
+				}
+			} else {
+				uniquePath = super.getUniquePath();
+			}
 		}
-		return getSleuthkitCase().newBlackboardArtifact(artifactTypeID, getId(), dataSourceObjectId);
+
+		return uniquePath;
+	}
+
+	@Deprecated
+	@SuppressWarnings("deprecation")
+	@Override
+	public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
+		return super.newArtifact(artifactTypeID);
+	}
+
+	/**
+	 * Create and add a data artifact associated with this abstract file. This
+	 * method creates the data artifact with the os account id associated with
+	 * this abstract file if one exists.
+	 *
+	 * @param artifactType   Type of data artifact to create.
+	 * @param attributesList Additional attributes to attach to this data
+	 *                       artifact.
+	 *
+	 * @return DataArtifact New data artifact.
+	 *
+	 * @throws TskCoreException If a critical error occurred within tsk core.
+	 */
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
+		return super.newDataArtifact(artifactType, attributesList, getOsAccountObjectId().orElse(null));
 	}
 
 	/**
@@ -1282,7 +1479,7 @@ protected AbstractFile(SleuthkitCase db, long objId, TskData.TSK_FS_ATTR_TYPE_EN
 			TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType, TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags,
 			long size, long ctime, long crtime, long atime, long mtime, short modes, int uid, int gid, String md5Hash, FileKnown knownState,
 			String parentPath) {
-		this(db, objId, db.getDataSourceObjectId(objId), attrType, (int) attrId, name, fileType, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, null, null);
+		this(db, objId, db.getDataSourceObjectId(objId), attrType, (int) attrId, name, fileType, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, null, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList());
 	}
 
 	/**
@@ -1327,7 +1524,7 @@ protected AbstractFile(SleuthkitCase db, long objId, TskData.TSK_FS_ATTR_TYPE_EN
 			String name, TskData.TSK_DB_FILES_TYPE_ENUM fileType, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType,
 			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime, short modes,
 			int uid, int gid, String md5Hash, FileKnown knownState, String parentPath, String mimeType) {
-		this(db, objId, dataSourceObjectId, attrType, (int) attrId, name, fileType, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, null, null);
+		this(db, objId, dataSourceObjectId, attrType, (int) attrId, name, fileType, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, null, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList());
 	}
 
 	/**
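
A minimal usage sketch (not part of the patch) of the new file attribute API on AbstractFile. The variable abstractFile is an assumed AbstractFile from an open case, and TSK_COMMENT (value type STRING) is used purely for illustration; surrounding setup and imports are omitted.

	BlackboardAttribute.Type commentType =
			new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT);
	List<Attribute> newAttributes = Collections.singletonList(
			new Attribute(commentType, "Carved from unallocated space"));

	// Passing null lets addAttributes create and manage a local transaction.
	abstractFile.addAttributes(newAttributes, null);

	// Attributes are loaded from the case database once and then served from the cache.
	for (Attribute attribute : abstractFile.getAttributes()) {
		System.out.println(attribute.getAttributeType().getDisplayName()
				+ " = " + attribute.getDisplayString());
	}
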
diff --git a/bindings/java/src/org/sleuthkit/datamodel/AccountFileInstance.java b/bindings/java/src/org/sleuthkit/datamodel/AccountFileInstance.java
index 2c4ea7c078bed4726c2b57db09b86140fda7bcb7..6552269112f007d2faecc266167c68f362e77976 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/AccountFileInstance.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/AccountFileInstance.java
@@ -103,7 +103,7 @@ public Content getFile() throws TskCoreException {
 	 *
 	 * @return A Data Source Object ID
 	 */
-	long getDataSourceObjectID() {
+	Long getDataSourceObjectID() {
 		return artifact.getDataSourceObjectID();
 	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/AnalysisResult.java b/bindings/java/src/org/sleuthkit/datamodel/AnalysisResult.java
new file mode 100644
index 0000000000000000000000000000000000000000..4632c2233959beda3338b2a7a878a8ec82e264f2
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/AnalysisResult.java
@@ -0,0 +1,151 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *	 http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+/**
+ * An AnalysisResult represents the outcome of some analysis technique that was
+ * applied to some data (i.e. Content) to determine the data's relevance. The
+ * result should have a conclusion and a relevance score. The score of the
+ * AnalysisResult will be used to calculate the aggregate score of the parent
+ * data. Additional metadata can be stored as BlackboardAttributes.
+ */
+public class AnalysisResult extends BlackboardArtifact {
+
+	private final String conclusion;	// conclusion of analysis - may be an empty string
+	private final Score score;			// relevance score based on the conclusion 
+	private final String configuration; // Optional descriptor of the configuration of the analysis technique (such as a set name). May be an empty string.
+	private final String justification;  // Justification/explanation of the conclusion. May be an empty string.
+
+	private boolean ignoreResult = false; // ignore this analysis result when computing score of the parent object.
+
+	/**
+	 * Constructs an analysis result.
+	 *
+	 * @param sleuthkitCase    The SleuthKit case (case database) that contains
+	 *                         the artifact data.
+	 * @param artifactID       The unique id for this artifact.
+	 * @param sourceObjId      The unique id of the content with which this
+	 *                         artifact is associated.
+	 * @param artifactObjId    The unique id of this artifact, in tsk_objects.
+	 * @param dataSourceObjId  Object ID of the datasource where the artifact
+	 *                         was found. May be null.
+	 * @param artifactTypeID   The type id of this artifact.
+	 * @param artifactTypeName The type name of this artifact.
+	 * @param displayName      The display name of this artifact.
+	 * @param reviewStatus     The review status of this artifact.
+	 * @param score            The score assigned by the analysis.
+	 * @param conclusion       Conclusion arrived at by the analysis. May be
+	 *                         null.
+	 * @param configuration    Configuration used for analysis. May be null.
+	 * @param justification	   Justification for the analysis. May be null.
+	 */
+	AnalysisResult(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjId, Long dataSourceObjId, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, Score score, String conclusion, String configuration, String justification) {
+		super(sleuthkitCase, artifactID, sourceObjId, artifactObjId, dataSourceObjId, artifactTypeID, artifactTypeName, displayName, reviewStatus);
+		this.score = score;
+		this.conclusion = (conclusion != null) ? conclusion : "";
+		this.configuration = (configuration != null) ? configuration : "";
+		this.justification = (justification != null) ? justification : "";
+	}
+
+	/**
+	 * Constructs an analysis result.
+	 *
+	 * @param sleuthkitCase    The SleuthKit case (case database) that contains
+	 *                         the artifact data.
+	 * @param artifactID       The unique id for this artifact.
+	 * @param sourceObjId      The unique id of the content with which this
+	 *                         artifact is associated.
+	 * @param artifactObjId    The unique id of this artifact, in tsk_objects.
+	 * @param dataSourceObjId  Object ID of the datasource where the artifact
+	 *                         was found. May be null.
+	 * @param artifactTypeID   The type id of this artifact.
+	 * @param artifactTypeName The type name of this artifact.
+	 * @param displayName      The display name of this artifact.
+	 * @param reviewStatus     The review status of this artifact.
+	 * @param isNew            If this analysis result is newly created.
+	 * @param score            The score assigned by the analysis.
+	 * @param conclusion       Conclusion arrived at by the analysis. May be
+	 *                         null.
+	 * @param configuration    Configuration used for analysis. May be null.
+	 * @param justification	   Justification for the analysis. May be null.
+	 */
+	AnalysisResult(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjID, Long dataSourceObjID, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, boolean isNew, Score score, String conclusion, String configuration, String justification) {
+		super(sleuthkitCase, artifactID, sourceObjId, artifactObjID, dataSourceObjID, artifactTypeID, artifactTypeName, displayName, reviewStatus, isNew);
+		this.score = score;
+		this.conclusion = (conclusion != null) ? conclusion : "";
+		this.configuration = (configuration != null) ? configuration : "";
+		this.justification = (justification != null) ? justification : "";
+	}
+
+	/**
+	 * Returns analysis result conclusion.
+	 *
+	 * @return Conclusion, returns an empty string if not set.
+	 */
+	public String getConclusion() {
+		return conclusion;
+	}
+
+	/**
+	 * Returns relevance score based on conclusion
+	 *
+	 * @return Score.
+	 */
+	public Score getScore() {
+		return score;
+	}
+
+	/**
+	 * Returns configuration used in analysis.
+	 *
+	 * @return Configuration, returns an empty string if not set.
+	 */
+	public String getConfiguration() {
+		return configuration;
+	}
+
+	/**
+	 * Returns justification for conclusion
+	 *
+	 * @return justification, returns an empty string if not set.
+	 */
+	public String getJustification() {
+		return justification;
+	}
+
+	/**
+	 * Sets if this result is to be ignored when calculating the aggregate score
+	 * of the parent object.
+	 *
+	 * @param ignore if the result should be ignored or not.
+	 */
+	public void setIgnoreResult(boolean ignore) {
+		ignoreResult = ignore;
+	}
+
+	/**
+	 * Checks if this result is to be ignored.
+	 *
+	 * @return true if the result should be ignored, false otherwise.
+	 */
+	public boolean ignoreResult() {
+		return ignoreResult;
+	}
+
+}
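
A minimal sketch (not part of the patch) of reading analysis results back from a piece of content using the accessors above together with the getAllAnalysisResults() and getAggregateScore() methods added to Content in this change. The variable content is an assumed Content instance; surrounding setup and imports are omitted.

	for (AnalysisResult result : content.getAllAnalysisResults()) {
		System.out.printf("%s: score=%s, conclusion=%s, justification=%s%n",
				result.getDisplayName(),
				result.getScore(),
				result.getConclusion(),
				result.getJustification());
	}
	Score aggregate = content.getAggregateScore();
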
diff --git a/bindings/java/src/org/sleuthkit/datamodel/AnalysisResultAdded.java b/bindings/java/src/org/sleuthkit/datamodel/AnalysisResultAdded.java
new file mode 100644
index 0000000000000000000000000000000000000000..8280d8d1e0a63a2692611bf3913c1adc023e427f
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/AnalysisResultAdded.java
@@ -0,0 +1,43 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *	 http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+/**
+ * This class encapsulates an analysis result added to Content, and the
+ * content's aggregate score upon adding the analysis result.
+ */
+public class AnalysisResultAdded {
+
+	private final AnalysisResult analysisResult;
+	private final Score score;
+
+	AnalysisResultAdded(AnalysisResult analysisResult, Score score) {
+		this.analysisResult = analysisResult;
+		this.score = score;
+	}
+
+	public AnalysisResult getAnalysisResult() {
+		return analysisResult;
+	}
+
+	public Score getAggregateScore() {
+		return score;
+	}
+
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Attribute.java b/bindings/java/src/org/sleuthkit/datamodel/Attribute.java
new file mode 100644
index 0000000000000000000000000000000000000000..829bdf5256f42db143852bcdb5c2e6aa6135bd1a
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/Attribute.java
@@ -0,0 +1,213 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *	 http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import com.google.common.base.MoreObjects;
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * A concrete implementation of a simple attribute that is associated with a
+ * parent object, such as a file.
+ */
+public class Attribute extends AbstractAttribute {
+ 
+	/**
+	 * The `parent` object of this Attribute.
+	 */
+	private long attributeParentId;
+	
+	/**
+	 * Primary key in the respective attribute table.
+	 */
+	private long id;
+
+	
+	/**
+	 * Constructs an attribute with an integer value. The attribute should be
+	 * added to an appropriate artifact.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueInt      The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER.
+	 */
+	public Attribute(BlackboardAttribute.Type attributeType, int valueInt) throws IllegalArgumentException {
+		super(attributeType, valueInt);
+	}
+
+ 
+	/**
+	 * Constructs an attribute with a long/datetime value. The attribute should
+	 * be added to an appropriate artifact.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueLong     The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  standard attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG
+	 *                                  or
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME.
+	 */
+	public Attribute(BlackboardAttribute.Type attributeType, long valueLong) throws IllegalArgumentException {
+		super(attributeType, valueLong);
+	}
+
+
+	/**
+	 * Constructs an attribute with a double value. The attribute should be
+	 * added to an appropriate artifact.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueDouble   The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE.
+	 */
+	public Attribute(BlackboardAttribute.Type attributeType, double valueDouble) throws IllegalArgumentException {
+		super(attributeType, valueDouble);
+	}
+
+ 
+	/**
+	 * Constructs an attribute with a string value. The attribute should be
+	 * added to an appropriate artifact.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueString   The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
+	 *                                  or
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON.
+	 */
+	public Attribute(BlackboardAttribute.Type attributeType, String valueString) throws IllegalArgumentException {
+		super(attributeType, valueString);
+	}
+
+
+	/**
+	 * Constructs an attribute with a byte array value. The attribute should be
+	 * added to an appropriate artifact.
+	 *
+	 * @param attributeType The attribute type.
+	 * @param valueBytes    The attribute value.
+	 *
+	 * @throws IllegalArgumentException If the value type of the specified
+	 *                                  attribute type is not
+	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE.
+	 */
+	public Attribute(BlackboardAttribute.Type attributeType, byte[] valueBytes) throws IllegalArgumentException {
+		super(attributeType, valueBytes);
+	}
+	
+	/**
+	 * Constructs an attribute. To be used when creating an attribute based on
+	 * a query of an attributes table in the case database.
+	 *
+	 * @param id               The primary key of the attribute in its
+	 *                         attribute table.
+	 * @param attributeOwnerId The owner id for this attribute.
+	 * @param attributeType    The attribute type.
+	 * @param valueInt         The value from the value_int32 column.
+	 * @param valueLong        The value from the value_int64 column.
+	 * @param valueDouble      The value from the value_double column.
+	 * @param valueString      The value from the value_text column.
+	 * @param valueBytes       The value from the value_byte column.
+	 * @param sleuthkitCase    A reference to the SleuthkitCase object
+	 *                         representing the case database.
+	 */
+	Attribute(long id, long attributeOwnerId, BlackboardAttribute.Type attributeType,  
+			int valueInt, long valueLong, double valueDouble, String valueString, byte[] valueBytes,
+			SleuthkitCase sleuthkitCase) {
+		super(attributeType, valueInt, valueLong, valueDouble, valueString, valueBytes, sleuthkitCase);
+		this.id = id;
+	}
+
+	/**
+	 * Gets the parent id of this attribute. The parent is the object with
+	 * which this attribute is associated. For example, for a file attribute
+	 * the parent id is the file object id.
+	 *
+	 * @return The parent object id.
+	 */
+	final public long getAttributeParentId() {
+		return this.attributeParentId;
+	}
+
+	 * Sets the parent id for this attribute. The parent is the object with
+	 * which this attribute is associated.
+	 *
+	 * @param attributeParentId The parent object id.
+	 * @param attributeParentId 
+	 */
+	final void setAttributeParentId(long attributeParentId) {
+		this.attributeParentId = attributeParentId;
+	}
+
+	
+	/**
+	 * Returns the id of the attribute.
+	 *
+	 * @return The attribute id.
+	 */
+	public long getId() {
+		return id;
+	}
+	
+	
+	/**
+	 * Sets the id of the attribute.
+	 *
+	 * @param id The attribute id.
+	 */
+	void setId(long id) {
+		this.id = id;
+	}
+	
+	@Override
+	public int hashCode() {
+		// Hash the byte array by content so that hashCode() stays consistent
+		// with the value-based equals() below.
+		return Objects.hash(
+				this.getAttributeType(), this.getValueInt(), this.getValueLong(), this.getValueDouble(),
+				this.getValueString(), Arrays.hashCode(this.getValueBytes()));
+	}
+
+	@Override
+	public boolean equals(Object that) {
+		if (this == that) {
+			return true;
+		} else if (that instanceof Attribute) {
+ 			return areValuesEqual(that);
+		} else {
+			return false;
+		}
+	}
+
+	@Override
+	public String toString() {
+		return MoreObjects.toStringHelper(this)
+				.add("attributeType", getAttributeType().toString())
+				.add("valueInt", getValueInt())
+				.add("valueLong", getValueLong())
+				.add("valueDouble", getValueDouble())
+				.add("valueString", getValueString())
+				.add("valueBytes", Arrays.toString(getValueBytes()) )
+				.add("Case", getCaseDatabase())
+				.toString();
+	}
+}
\ No newline at end of file
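
A small sketch (not part of the patch) illustrating that Attribute equality is value-based: the attribute type and value fields are compared, while the row id and parent id assigned by the database layer do not take part in equals(). TSK_COMMENT is used as an assumed illustrative type; surrounding setup and imports are omitted.

	BlackboardAttribute.Type commentType =
			new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT);
	Attribute first = new Attribute(commentType, "same text");
	Attribute second = new Attribute(commentType, "same text");
	System.out.println(first.equals(second));   // true: same type and value
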
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java b/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java
index 389e7c6fb28436c04ba1263996f16768f39cbc96..6df67e88659a184ef3d82dc39da5ab9b410e7bc5 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Blackboard.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2018 Basis Technology Corp.
+ * Copyright 2018-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,6 +19,7 @@
 package org.sleuthkit.datamodel;
 
 import com.google.common.collect.ImmutableSet;
+import java.sql.PreparedStatement;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -30,7 +31,11 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 import java.util.stream.Collectors;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
 
 /**
  * A representation of the blackboard, a place where artifacts and their
@@ -38,6 +43,8 @@
  */
 public final class Blackboard {
 
+	private static final Logger LOGGER = Logger.getLogger(Blackboard.class.getName());
+
 	private final SleuthkitCase caseDb;
 
 	/**
@@ -100,6 +107,8 @@ public void postArtifacts(Collection<BlackboardArtifact> artifacts, String modul
 	 * Gets an artifact type, creating it if it does not already exist. Use this
 	 * method to define custom artifact types.
 	 *
+	 * This assumes that the artifact type is of category DATA_ARTIFACT.
+	 *
 	 * @param typeName    The type name of the artifact type.
 	 * @param displayName The display name of the artifact type.
 	 *
@@ -110,8 +119,29 @@ public void postArtifacts(Collection<BlackboardArtifact> artifacts, String modul
 	 */
 	public BlackboardArtifact.Type getOrAddArtifactType(String typeName, String displayName) throws BlackboardException {
 
+		return getOrAddArtifactType(typeName, displayName, BlackboardArtifact.Category.DATA_ARTIFACT);
+	}
+
+	/**
+	 * Gets an artifact type, creating it if it does not already exist. Use this
+	 * method to define custom artifact types.
+	 *
+	 * @param typeName    The type name of the artifact type.
+	 * @param displayName The display name of the artifact type.
+	 * @param category    The artifact type category.
+	 *
+	 * @return A type object representing the artifact type.
+	 *
+	 * @throws BlackboardException If there is a problem getting or adding the
+	 *                             artifact type.
+	 */
+	public BlackboardArtifact.Type getOrAddArtifactType(String typeName, String displayName, BlackboardArtifact.Category category) throws BlackboardException {
+		if (category == null) {
+			throw new BlackboardException("Category provided must be non-null");
+		}
+		
 		try {
-			return caseDb.addBlackboardArtifactType(typeName, displayName);
+			return caseDb.addBlackboardArtifactType(typeName, displayName, category);
 		} catch (TskDataException typeExistsEx) {
 			try {
 				return caseDb.getArtifactType(typeName);
@@ -123,6 +153,647 @@ public BlackboardArtifact.Type getOrAddArtifactType(String typeName, String disp
 		}
 	}
 
+	/**
+	 * Adds new analysis result artifact.
+	 *
+	 * @param artifactType    Type of analysis result artifact to create.
+	 * @param objId           Object id of parent.
+	 * @param dataSourceObjId Data source object id, may be null.
+	 * @param score	          Score associated with this analysis result.
+	 * @param conclusion      Conclusion of the analysis, may be null or an
+	 *                        empty string.
+	 * @param configuration   Configuration associated with this analysis, may
+	 *                        be null or an empty string.
+	 * @param justification   Justification, may be null or an empty string.
+	 * @param attributesList  Attributes to be attached to this analysis result
+	 *                        artifact.
+	 *
+	 * @return AnalysisResultAdded The analysis result added and the current
+	 *         aggregate score of the content.
+	 *
+	 * @throws TskCoreException
+	 * @throws BlackboardException exception thrown if a critical error occurs
+	 *                             within TSK core
+	 */
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score,
+			String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList)
+			throws BlackboardException, TskCoreException {
+
+		if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) {
+			throw new BlackboardException(String.format("Artifact type (name = %s) is not of Analysis Result category. ", artifactType.getTypeName()));
+		}
+
+		CaseDbTransaction transaction = caseDb.beginTransaction();
+		try {
+			AnalysisResultAdded analysisResult = newAnalysisResult(artifactType, objId, dataSourceObjId, score,
+					conclusion, configuration, justification, attributesList, transaction);
+			transaction.commit();
+			return analysisResult;
+		} catch (TskCoreException | BlackboardException ex) {
+			try {
+				transaction.rollback();
+			} catch (TskCoreException ex2) {
+				LOGGER.log(Level.SEVERE, "Failed to rollback transaction after exception. "
+						+ "Error invoking newAnalysisResult with dataSourceObjId: "
+						+ (dataSourceObjId == null ? "<null>" : dataSourceObjId)
+						+ ",  sourceObjId: " + objId, ex2);
+			}
+			throw ex;
+		}
+	}
+
+	/**
+	 * Adds new analysis result artifact.
+	 *
+	 * @param artifactType    Type of analysis result artifact to create.
+	 * @param objId           Object id of parent.
+	 * @param dataSourceObjId Data source object id, may be null.
+	 * @param score	          Score associated with this analysis result.
+	 * @param conclusion      Conclusion of the analysis, may be null or an
+	 *                        empty string.
+	 * @param configuration   Configuration associated with this analysis, may
+	 *                        be null or an empty string.
+	 * @param justification   Justification, may be null or an empty string.
+	 * @param attributesList  Attributes to be attached to this analysis result
+	 *                        artifact.
+	 * @param transaction     DB transaction to use.
+	 *
+	 * @return AnalysisResultAdded The analysis result added and the current
+	 *         aggregate score of the content.
+	 *
+	 * @throws BlackboardException exception thrown if a critical error occurs
+	 *                             within TSK core
+	 */
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score,
+			String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList, CaseDbTransaction transaction) throws BlackboardException {
+
+		if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) {
+			throw new BlackboardException(String.format("Artifact type (name = %s) is not of the Analysis Result category.", artifactType.getTypeName()));
+		}
+
+		try {
+			// add analysis result
+			AnalysisResult analysisResult = caseDb.newAnalysisResult(artifactType, objId, dataSourceObjId, score, conclusion, configuration, justification, transaction.getConnection());
+
+			// add the given attributes
+			if (attributesList != null && !attributesList.isEmpty()) {
+				analysisResult.addAttributes(attributesList, transaction);
+			}
+
+			// update the final score for the object 
+			Score aggregateScore = caseDb.getScoringManager().updateAggregateScoreAfterAddition(objId, dataSourceObjId, analysisResult.getScore(), transaction);
+
+			// return the analysis result and the current aggregate score.
+			return new AnalysisResultAdded(analysisResult, aggregateScore);
+
+		} catch (TskCoreException ex) {
+			throw new BlackboardException("Failed to add analysis result.", ex);
+		}
+	}
+
+	/**
+	 * Delete the specified analysis result.
+	 *
+	 * Deletes the result from blackboard_artifacts and tsk_analysis_results,
+	 * and recalculates and updates the aggregate score of the content. Fires an
+	 * event to indicate that the analysis result has been deleted and that the
+	 * score of the item has changed.
+	 *
+	 * @param analysisResult AnalysisResult to delete.
+	 *
+	 * @return New score of the content.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Score deleteAnalysisResult(AnalysisResult analysisResult) throws TskCoreException {
+
+		CaseDbTransaction transaction = this.caseDb.beginTransaction();
+		try {
+			Score score = deleteAnalysisResult(analysisResult, transaction);
+			transaction.commit();
+			transaction = null;
+
+			return score;
+		} finally {
+			if (transaction != null) {
+				transaction.rollback();
+			}
+		}
+	}
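+
+	/*
+	 * A brief deletion sketch (comments only): "blackboard" is assumed to be a
+	 * caller-side variable and "analysisResult" an AnalysisResult obtained
+	 * elsewhere (e.g. via getAnalysisResultById()); the returned Score is the
+	 * recalculated aggregate score of the parent content.
+	 *
+	 *   Score newScore = blackboard.deleteAnalysisResult(analysisResult);
+	 */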
+
+	/**
+	 * Delete the specified analysis result.
+	 *
+	 * Deletes the result from blackboard_artifacts and tsk_analysis_results,
+	 * and recalculates and updates the aggregate score of the content.
+	 *
+	 * @param artifactObjId Artifact object id of the analysis result to delete.
+	 * @param transaction   Transaction to use for database operations.
+	 *
+	 * @return New score of the content.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Score deleteAnalysisResult(long artifactObjId, CaseDbTransaction transaction) throws TskCoreException {
+
+		List<AnalysisResult> analysisResults = getAnalysisResultsWhere(" arts.artifact_obj_id = " + artifactObjId, transaction.getConnection());
+
+		if (analysisResults.isEmpty()) {
+			throw new TskCoreException(String.format("Analysis Result not found for artifact obj id %d", artifactObjId));
+		}
+
+		return deleteAnalysisResult(analysisResults.get(0), transaction);
+	}
+
+	/**
+	 * Delete the specified analysis result.
+	 *
+	 * Deletes the result from blackboard_artifacts and tsk_analysis_results,
+	 * and recalculates and updates the aggregate score of the content.
+	 *
+	 * @param analysisResult AnalysisResult to delete.
+	 * @param transaction    Transaction to use for database operations.
+	 *
+	 * @return New score of the content.
+	 *
+	 * @throws TskCoreException
+	 */
+	private Score deleteAnalysisResult(AnalysisResult analysisResult, CaseDbTransaction transaction) throws TskCoreException {
+
+		try {
+			CaseDbConnection connection = transaction.getConnection();
+
+			// delete the blackboard artifacts row. This will also delete the tsk_analysis_result row
+			String deleteSQL = "DELETE FROM blackboard_artifacts WHERE artifact_obj_id = ?";
+
+			PreparedStatement deleteStatement = connection.getPreparedStatement(deleteSQL, Statement.RETURN_GENERATED_KEYS);
+			deleteStatement.clearParameters();
+			deleteStatement.setLong(1, analysisResult.getId());
+
+			deleteStatement.executeUpdate();
+
+			// register the deleted result with the transaction so an event can be fired for it. 
+			transaction.registerDeletedAnalysisResult(analysisResult.getObjectID());
+
+			return caseDb.getScoringManager().updateAggregateScoreAfterDeletion(analysisResult.getObjectID(), analysisResult.getDataSourceObjectID(), transaction);
+
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error deleting analysis result with artifact obj id %d", analysisResult.getId()), ex);
+		}
+	}
+
+	private final static String ANALYSIS_RESULT_QUERY_STRING = "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
+			+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
+			+ " types.type_name AS type_name, types.display_name AS display_name, types.category_type as category_type,"//NON-NLS
+			+ " arts.review_status_id AS review_status_id, " //NON-NLS
+			+ " results.conclusion AS conclusion,  results.significance AS significance,  results.priority AS priority,  "
+			+ " results.configuration AS configuration,  results.justification AS justification "
+			+ " FROM blackboard_artifacts AS arts "
+			+ " JOIN blackboard_artifact_types AS types " //NON-NLS
+			+ "		ON arts.artifact_type_id = types.artifact_type_id" //NON-NLS
+			+ " LEFT JOIN tsk_analysis_results AS results "
+			+ "		ON arts.artifact_obj_id = results.artifact_obj_id " //NON-NLS
+			+ " WHERE arts.review_status_id != " + BlackboardArtifact.ReviewStatus.REJECTED.getID() //NON-NLS
+			+ "     AND types.category_type = " + BlackboardArtifact.Category.ANALYSIS_RESULT.getID(); // NON-NLS
+
+	
+	/**
+	 * Get all analysis results of a given artifact type.
+	 *
+	 * @param artifactTypeId The artifact type id for which to search.
+	 *
+	 * @return The list of analysis results.
+	 *
+	 * @throws TskCoreException Exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	public List<AnalysisResult> getAnalysisResultsByType(int artifactTypeId) throws TskCoreException {
+		return getAnalysisResultsWhere(" arts.artifact_type_id = " + artifactTypeId);
+	}
+
+	/**
+	 * Get all analysis results of a given artifact type for a given data
+	 * source.
+	 *
+	 * @param artifactTypeId  The artifact type id for which to search.
+	 * @param dataSourceObjId Object id of the data source to look under.
+	 *
+	 * @return The list of analysis results.
+	 *
+	 * @throws TskCoreException Exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	public List<AnalysisResult> getAnalysisResultsByType(int artifactTypeId, long dataSourceObjId) throws TskCoreException {
+		return getAnalysisResultsWhere(" arts.artifact_type_id = " + artifactTypeId + " AND arts.data_source_obj_id = " + dataSourceObjId);
+	}
+
+	
+	/**
+	 * Get all analysis results for a given object.
+	 *
+	 * @param sourceObjId Object id.
+	 *
+	 * @return list of analysis results.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	public List<AnalysisResult> getAnalysisResults(long sourceObjId) throws TskCoreException {
+		return getAnalysisResultsWhere(" arts.obj_id = " + sourceObjId);
+	}
+	
+	
+	/**
+	 * Get all data artifacts for a given object.
+	 *
+	 * @param sourceObjId Object id.
+	 *
+	 * @return List of data artifacts.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	List<DataArtifact> getDataArtifactsBySource(long sourceObjId) throws TskCoreException {
+		caseDb.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = caseDb.getConnection()) {
+			return getDataArtifactsWhere(" artifacts.obj_id = " + sourceObjId, connection);
+		} finally {
+			caseDb.releaseSingleUserCaseReadLock();
+		}
+	}
+	
+	
+	/**
+	 * Returns true if there are data artifacts belonging to the sourceObjId.
+	 *
+	 * @param sourceObjId The source content object id.
+	 *
+	 * @return True if there are data artifacts belonging to this source obj id.
+	 *
+	 * @throws TskCoreException
+	 */
+	public boolean hasDataArtifacts(long sourceObjId) throws TskCoreException {
+		return hasArtifactsOfCategory(BlackboardArtifact.Category.DATA_ARTIFACT, sourceObjId);
+	}
+	
+	/**
+	 * Returns true if there are analysis results belonging to the sourceObjId.
+	 *
+	 * @param sourceObjId The source content object id.
+	 *
+	 * @return True if there are analysis results belonging to this source obj id.
+	 *
+	 * @throws TskCoreException
+	 */
+	public boolean hasAnalysisResults(long sourceObjId) throws TskCoreException {
+		return hasArtifactsOfCategory(BlackboardArtifact.Category.ANALYSIS_RESULT, sourceObjId);
+	}
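+
+	/*
+	 * A one-line usage sketch (comments only); "blackboard" and "file" are
+	 * assumed caller-side variables.
+	 *
+	 *   boolean hasResults = blackboard.hasAnalysisResults(file.getId());
+	 */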
+	
+	
+	/**
+	 * Returns true if there are artifacts of the given category belonging to
+	 * the sourceObjId.
+	 *
+	 * @param category    The category of the artifacts.
+	 * @param sourceObjId The source content object id.
+	 *
+	 * @return True if there are artifacts of the given category belonging to
+	 *         this source obj id.
+	 *
+	 * @throws TskCoreException
+	 */
+	private boolean hasArtifactsOfCategory(BlackboardArtifact.Category category, long sourceObjId) throws TskCoreException {
+		String queryString = "SELECT COUNT(*) AS count " //NON-NLS
+			+ " FROM blackboard_artifacts AS arts "
+			+ " JOIN blackboard_artifact_types AS types " //NON-NLS
+			+ "		ON arts.artifact_type_id = types.artifact_type_id" //NON-NLS
+			+ " WHERE types.category_type = " + category.getID()
+			+ " AND arts.obj_id = " + sourceObjId;
+
+		caseDb.acquireSingleUserCaseReadLock();
+		try (SleuthkitCase.CaseDbConnection connection = caseDb.getConnection();
+				Statement statement = connection.createStatement();
+				ResultSet resultSet = connection.executeQuery(statement, queryString);) {
+			if (resultSet.next()) {
+				return resultSet.getLong("count") > 0;
+			}
+			return false;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting artifact count for source object id " + sourceObjId + ": " + ex.getMessage(), ex);
+		} finally {
+			caseDb.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get all analysis results for a given object.
+	 *
+	 * @param sourceObjId Object id.
+	 * @param connection  Database connection to use.
+	 *
+	 *
+	 * @return list of analysis results.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	List<AnalysisResult> getAnalysisResults(long sourceObjId, CaseDbConnection connection) throws TskCoreException {
+		return getAnalysisResultsWhere(" arts.obj_id = " + sourceObjId, connection);
+	}
+
+	/**
+	 * Get analysis results of the given type, for the given object.
+	 *
+	 * @param sourceObjId    Object id.
+	 * @param artifactTypeId Result type to get.
+	 *
+	 * @return list of analysis results.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	public List<AnalysisResult> getAnalysisResults(long sourceObjId, int artifactTypeId) throws TskCoreException {
+		// Get the artifact type to check that it is in the analysis result category.
+		BlackboardArtifact.Type artifactType = caseDb.getArtifactType(artifactTypeId);
+		if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) {
+			throw new TskCoreException(String.format("Artifact type id %d is not in analysis result category.", artifactTypeId));
+		}
+
+		String whereClause = " types.artifact_type_id = " + artifactTypeId
+				+ " AND arts.obj_id = " + sourceObjId;
+		return getAnalysisResultsWhere(whereClause);
+	}
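+
+	/*
+	 * A small retrieval sketch (comments only): get keyword-hit analysis
+	 * results for one content object. "blackboard" and "file" are assumed
+	 * caller-side variables.
+	 *
+	 *   List<AnalysisResult> hits = blackboard.getAnalysisResults(
+	 *           file.getId(), BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID());
+	 */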
+
+	/**
+	 * Get all analysis results matching the given where sub-clause.
+	 *
+	 * @param whereClause Where sub clause, specifies conditions to match.
+	 *
+	 * @return list of analysis results.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	public List<AnalysisResult> getAnalysisResultsWhere(String whereClause) throws TskCoreException {
+		caseDb.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = caseDb.getConnection()) {
+			return getAnalysisResultsWhere(whereClause, connection);
+		} finally {
+			caseDb.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get all analysis results matching the given where sub-clause. Uses the
+	 * given database connection to execute the query.
+	 *
+	 * @param whereClause Where sub clause, specifies conditions to match.
+	 * @param connection  Database connection to use.
+	 *
+	 * @return list of analysis results.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core
+	 */
+	List<AnalysisResult> getAnalysisResultsWhere(String whereClause, CaseDbConnection connection) throws TskCoreException {
+
+		final String queryString = ANALYSIS_RESULT_QUERY_STRING
+				+ " AND " + whereClause;
+
+		try (Statement statement = connection.createStatement();
+				ResultSet resultSet = connection.executeQuery(statement, queryString);) {
+
+			List<AnalysisResult> analysisResults = resultSetToAnalysisResults(resultSet);
+			return analysisResults;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting analysis results for WHERE clause = '%s'", whereClause), ex);
+		}
+	}
+
+	/**
+	 * Get the analysis results by its artifact_obj_id.
+	 *
+	 * @param artifactObjId Artifact object id of the analysis result.
+	 *
+	 * @return AnalysisResult.
+	 *
+	 * @throws TskCoreException If a critical error occurred within TSK core.
+	 */
+	public AnalysisResult getAnalysisResultById(long artifactObjId) throws TskCoreException {
+
+		String whereClause = " arts.artifact_obj_id = " + artifactObjId;
+		List<AnalysisResult> results = getAnalysisResultsWhere(whereClause);
+
+		if (results.isEmpty()) { // throw an error if no analysis result found by id.
+			throw new TskCoreException(String.format("Error getting analysis result with id = '%d'", artifactObjId));
+		}
+		if (results.size() > 1) { // should not happen - throw an error
+			throw new TskCoreException(String.format("Multiple analysis results found with id = '%d'", artifactObjId));
+		}
+
+		return results.get(0);
+	}
+
+	/**
+	 * Creates AnalysisResult objects for the result set of a table query of
+	 * the form "SELECT * FROM blackboard_artifacts JOIN tsk_analysis_results
+	 * WHERE ...".
+	 *
+	 * @param resultSet A result set from a query of the blackboard_artifacts
+	 *                  table of the form "SELECT * FROM blackboard_artifacts,
+	 *                  tsk_analysis_results WHERE ...".
+	 *
+	 * @return A list of AnalysisResult objects.
+	 *
+	 * @throws SQLException     Thrown if there is a problem iterating through
+	 *                          the result set.
+	 * @throws TskCoreException Thrown if there is an error looking up the
+	 *                          artifact type id.
+	 */
+	private List<AnalysisResult> resultSetToAnalysisResults(ResultSet resultSet) throws SQLException, TskCoreException {
+		ArrayList<AnalysisResult> analysisResults = new ArrayList<>();
+
+		while (resultSet.next()) {
+			analysisResults.add(new AnalysisResult(caseDb, resultSet.getLong("artifact_id"), resultSet.getLong("obj_id"),
+					resultSet.getLong("artifact_obj_id"),
+					resultSet.getObject("data_source_obj_id") != null ? resultSet.getLong("data_source_obj_id") : null,
+					resultSet.getInt("artifact_type_id"), resultSet.getString("type_name"), resultSet.getString("display_name"),
+					BlackboardArtifact.ReviewStatus.withID(resultSet.getInt("review_status_id")),
+					new Score(Score.Significance.fromID(resultSet.getInt("significance")), Score.Priority.fromID(resultSet.getInt("priority"))),
+					resultSet.getString("conclusion"), resultSet.getString("configuration"), resultSet.getString("justification")));
+		} //end for each resultSet
+
+		return analysisResults;
+	}
+
+	private final static String DATA_ARTIFACT_QUERY_STRING = "SELECT DISTINCT artifacts.artifact_id AS artifact_id, " //NON-NLS
+			+ "artifacts.obj_id AS obj_id, artifacts.artifact_obj_id AS artifact_obj_id, artifacts.data_source_obj_id AS data_source_obj_id, artifacts.artifact_type_id AS artifact_type_id, " //NON-NLS
+			+ " types.type_name AS type_name, types.display_name AS display_name, types.category_type as category_type,"//NON-NLS
+			+ " artifacts.review_status_id AS review_status_id, " //NON-NLS
+			+ " data_artifacts.os_account_obj_id as os_account_obj_id " //NON-NLS
+			+ " FROM blackboard_artifacts AS artifacts "
+			+ " JOIN blackboard_artifact_types AS types " //NON-NLS
+			+ "		ON artifacts.artifact_type_id = types.artifact_type_id" //NON-NLS
+			+ " LEFT JOIN tsk_data_artifacts AS data_artifacts "
+			+ "		ON artifacts.artifact_obj_id = data_artifacts.artifact_obj_id " //NON-NLS
+			+ " WHERE artifacts.review_status_id != " + BlackboardArtifact.ReviewStatus.REJECTED.getID() //NON-NLS
+			+ "     AND types.category_type = " + BlackboardArtifact.Category.DATA_ARTIFACT.getID(); // NON-NLS
+
+	/**
+	 * Get all data artifacts of a given type for a given data source.
+	 *
+	 * @param artifactTypeID  Artifact type to get.
+	 * @param dataSourceObjId Data source to look under.
+	 *
+	 * @return List of data artifacts. May be an empty list.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	public List<DataArtifact> getDataArtifacts(int artifactTypeID, long dataSourceObjId) throws TskCoreException {
+
+		// Get the artifact type to check that it is in the data artifact category.
+		BlackboardArtifact.Type artifactType = caseDb.getArtifactType(artifactTypeID);
+		if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) {
+			throw new TskCoreException(String.format("Artifact type id %d is not in data artifact category.", artifactTypeID));
+		}
+
+		caseDb.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = caseDb.getConnection()) {
+			String whereClause = "artifacts.data_source_obj_id = " + dataSourceObjId
+					+ " AND artifacts.artifact_type_id = " + artifactTypeID;
+
+			return getDataArtifactsWhere(whereClause, connection);
+		} finally {
+			caseDb.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get all data artifacts of a given type.
+	 *
+	 * @param artifactTypeID Artifact type to get.
+	 *
+	 * @return List of data artifacts. May be an empty list.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	public List<DataArtifact> getDataArtifacts(int artifactTypeID) throws TskCoreException {
+		// Get the artifact type to check that it is in the data artifact category.
+		BlackboardArtifact.Type artifactType = caseDb.getArtifactType(artifactTypeID);
+		if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) {
+			throw new TskCoreException(String.format("Artifact type id %d is not in data artifact category.", artifactTypeID));
+		}
+
+		caseDb.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = caseDb.getConnection()) {
+			String whereClause = " artifacts.artifact_type_id = " + artifactTypeID;
+
+			return getDataArtifactsWhere(whereClause, connection);
+		} finally {
+			caseDb.releaseSingleUserCaseReadLock();
+		}
+	}
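+
+	/*
+	 * A hypothetical retrieval sketch (comments only): fetch web-history data
+	 * artifacts for one data source and then case-wide. "blackboard" and
+	 * "dataSource" are assumed caller-side variables.
+	 *
+	 *   List<DataArtifact> forDataSource = blackboard.getDataArtifacts(
+	 *           BlackboardArtifact.Type.TSK_WEB_HISTORY.getTypeID(), dataSource.getId());
+	 *   List<DataArtifact> caseWide = blackboard.getDataArtifacts(
+	 *           BlackboardArtifact.Type.TSK_WEB_HISTORY.getTypeID());
+	 */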
+
+	/**
+	 * Get the data artifact with the given artifact obj id.
+	 *
+	 * @param artifactObjId Object id of the data artifact to get.
+	 *
+	 * @return Data artifact with given artifact object id.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	public DataArtifact getDataArtifactById(long artifactObjId) throws TskCoreException {
+		caseDb.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = caseDb.getConnection()) {
+			String whereClause = " artifacts.artifact_obj_id = " + artifactObjId;
+
+			List<DataArtifact> artifacts = getDataArtifactsWhere(whereClause, connection);
+			if (artifacts.isEmpty()) { // throw an error if no data artifact found by id.
+				throw new TskCoreException(String.format("Error getting data artifact with id = '%d'", artifactObjId));
+			}
+			if (artifacts.size() > 1) { // should not happen - throw an error
+				throw new TskCoreException(String.format("Multiple data artifacts found with id = '%d'", artifactObjId));
+			}
+
+			return artifacts.get(0);
+		} finally {
+			caseDb.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get all data artifacts matching the given where sub-clause. Uses the
+	 * given database connection to execute the query.
+	 *
+	 * @param whereClause SQL Where sub-clause, specifies conditions to match.
+	 * @param connection  Database connection to use.
+	 *
+	 * @return List of data artifacts. May be an empty list.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 *                          within TSK core.
+	 */
+	private List<DataArtifact> getDataArtifactsWhere(String whereClause, CaseDbConnection connection) throws TskCoreException {
+
+		final String queryString = DATA_ARTIFACT_QUERY_STRING
+				+ " AND ( " + whereClause + " )";
+
+		try (Statement statement = connection.createStatement();
+				ResultSet resultSet = connection.executeQuery(statement, queryString);) {
+
+			List<DataArtifact> dataArtifacts = resultSetToDataArtifacts(resultSet, connection);
+			return dataArtifacts;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting data artifacts with queryString = %s", queryString), ex);
+		}
+	}
+
+	/**
+	 * Creates DataArtifact objects for the result set of a table query of the
+	 * form "SELECT * FROM blackboard_artifacts JOIN tsk_data_artifacts WHERE ...".
+	 *
+	 * @param resultSet  A result set from a query of the blackboard_artifacts
+	 *                   table of the form "SELECT * FROM blackboard_artifacts,
+	 *                   tsk_data_artifacts WHERE ...".
+	 * @param connection Database connection.
+	 *
+	 * @return A list of DataArtifact objects.
+	 *
+	 * @throws SQLException     Thrown if there is a problem iterating through
+	 *                          the result set.
+	 * @throws TskCoreException Thrown if there is an error looking up the
+	 *                          artifact type id.
+	 */
+	private List<DataArtifact> resultSetToDataArtifacts(ResultSet resultSet, CaseDbConnection connection) throws SQLException, TskCoreException {
+		ArrayList<DataArtifact> dataArtifacts = new ArrayList<>();
+
+		while (resultSet.next()) {
+
+			Long osAccountObjId = resultSet.getLong("os_account_obj_id");
+			if (resultSet.wasNull()) {
+				osAccountObjId = null;
+			}
+
+			dataArtifacts.add(new DataArtifact(caseDb, resultSet.getLong("artifact_id"), resultSet.getLong("obj_id"),
+					resultSet.getLong("artifact_obj_id"),
+					resultSet.getObject("data_source_obj_id") != null ? resultSet.getLong("data_source_obj_id") : null,
+					resultSet.getInt("artifact_type_id"), resultSet.getString("type_name"), resultSet.getString("display_name"),
+					BlackboardArtifact.ReviewStatus.withID(resultSet.getInt("review_status_id")), osAccountObjId, false));
+		} //end for each resultSet
+
+		return dataArtifacts;
+	}
+
+	/**
+	 * Get the artifact type associated with an artifact type id.
+	 *
+	 * @param artTypeId An artifact type id.
+	 *
+	 * @return The artifact type.
+	 *
+	 * @throws TskCoreException If an error occurs accessing the case database
+	 *                          or no value is found.
+	 *
+	 */
+	public BlackboardArtifact.Type getArtifactType(int artTypeId) throws TskCoreException {
+		return caseDb.getArtifactType(artTypeId);
+	}
+	
 	/**
 	 * Gets an attribute type, creating it if it does not already exist. Use
 	 * this method to define custom attribute types.
@@ -165,7 +836,9 @@ public BlackboardAttribute.Type getOrAddAttributeType(String typeName, Blackboar
 	public List<BlackboardArtifact.Type> getArtifactTypesInUse(long dataSourceObjId) throws TskCoreException {
 
 		final String queryString = "SELECT DISTINCT arts.artifact_type_id AS artifact_type_id, "
-				+ "types.type_name AS type_name, types.display_name AS display_name "
+				+ "types.type_name AS type_name, "
+				+ "types.display_name AS display_name, "
+				+ "types.category_type AS category_type "
 				+ "FROM blackboard_artifact_types AS types "
 				+ "INNER JOIN blackboard_artifacts AS arts "
 				+ "ON arts.artifact_type_id = types.artifact_type_id "
@@ -179,7 +852,8 @@ public List<BlackboardArtifact.Type> getArtifactTypesInUse(long dataSourceObjId)
 			List<BlackboardArtifact.Type> uniqueArtifactTypes = new ArrayList<>();
 			while (resultSet.next()) {
 				uniqueArtifactTypes.add(new BlackboardArtifact.Type(resultSet.getInt("artifact_type_id"),
-						resultSet.getString("type_name"), resultSet.getString("display_name")));
+						resultSet.getString("type_name"), resultSet.getString("display_name"), 
+						BlackboardArtifact.Category.fromID(resultSet.getInt("category_type"))));
 			}
 			return uniqueArtifactTypes;
 		} catch (SQLException ex) {
@@ -222,12 +896,12 @@ public List<BlackboardArtifact> getArtifacts(int artifactTypeID, long dataSource
 		return caseDb.getArtifactsHelper("blackboard_artifacts.data_source_obj_id = " + dataSourceObjId
 				+ " AND blackboard_artifact_types.artifact_type_id = " + artifactTypeID + ";");
 	}
-	
+
 	/**
-	 * Get all blackboard artifacts of the given type(s) for the given data source(s). Does not included rejected
-	 * artifacts.
+	 * Get all blackboard artifacts of the given type(s) for the given data
+	 * source(s). Does not included rejected artifacts.
 	 *
-	 * @param artifactTypes  list of artifact types to get
+	 * @param artifactTypes    list of artifact types to get
 	 * @param dataSourceObjIds data sources to look under
 	 *
 	 * @return list of blackboard artifacts
@@ -235,13 +909,13 @@ public List<BlackboardArtifact> getArtifacts(int artifactTypeID, long dataSource
 	 * @throws TskCoreException exception thrown if a critical error occurs
 	 *                          within TSK core
 	 */
-	public List<BlackboardArtifact> getArtifacts(Collection<BlackboardArtifact.Type> artifactTypes, 
+	public List<BlackboardArtifact> getArtifacts(Collection<BlackboardArtifact.Type> artifactTypes,
 			Collection<Long> dataSourceObjIds) throws TskCoreException {
-		
+
 		if (artifactTypes.isEmpty() || dataSourceObjIds.isEmpty()) {
 			return new ArrayList<>();
 		}
-		
+
 		String typeQuery = "";
 		for (BlackboardArtifact.Type type : artifactTypes) {
 			if (!typeQuery.isEmpty()) {
@@ -249,7 +923,7 @@ public List<BlackboardArtifact> getArtifacts(Collection<BlackboardArtifact.Type>
 			}
 			typeQuery += "blackboard_artifact_types.artifact_type_id = " + type.getTypeID();
 		}
-		
+
 		String dsQuery = "";
 		for (long dsId : dataSourceObjIds) {
 			if (!dsQuery.isEmpty()) {
@@ -257,11 +931,11 @@ public List<BlackboardArtifact> getArtifacts(Collection<BlackboardArtifact.Type>
 			}
 			dsQuery += "blackboard_artifacts.data_source_obj_id = " + dsId;
 		}
-		
+
 		String fullQuery = "( " + typeQuery + " ) AND ( " + dsQuery + " );";
-		
+
 		return caseDb.getArtifactsHelper(fullQuery);
-	}	
+	}
 
 	/**
 	 * Gets count of blackboard artifacts of given type that match a given WHERE
@@ -359,7 +1033,6 @@ private boolean attributesMatch(Collection<BlackboardAttribute> fileAttributesLi
 			boolean match = false;
 			for (BlackboardAttribute fileAttribute : fileAttributesList) {
 				BlackboardAttribute.Type attributeType = fileAttribute.getAttributeType();
-
 				if (attributeType.getTypeID() != expectedAttribute.getAttributeType().getTypeID()) {
 					continue;
 				}
@@ -425,6 +1098,7 @@ private boolean attributesMatch(Collection<BlackboardAttribute> fileAttributesLi
 
 	}
 
+
 	/**
 	 * A Blackboard exception.
 	 */
@@ -453,6 +1127,110 @@ public static final class BlackboardException extends Exception {
 		}
 	}
 
+	/**
+	 * Add a new data artifact with the given type.
+	 *
+	 * @param artifactType    The type of the data artifact.
+	 * @param sourceObjId     The object id of the content that is the source
+	 *                        of this artifact.
+	 * @param dataSourceObjId The object id of the data source the artifact
+	 *                        source content belongs to; may be the same as the
+	 *                        sourceObjId. May be null.
+	 * @param attributes      The attributes. May be empty or null.
+	 * @param osAccountId     The object id of the OS account associated with
+	 *                        the artifact. May be null.
+	 *
+	 * @return DataArtifact A new data artifact.
+	 *
+	 * @throws TskCoreException If a critical error occurs within tsk core.
+	 */
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId,
+			Collection<BlackboardAttribute> attributes, Long osAccountId) throws TskCoreException {
+
+		if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) {
+			throw new TskCoreException(String.format("Artifact type (name = %s) is not of the Data Artifact category.", artifactType.getTypeName()));
+		}
+
+		CaseDbTransaction transaction = caseDb.beginTransaction();
+		try {
+			DataArtifact dataArtifact = newDataArtifact(artifactType, sourceObjId, dataSourceObjId,
+					attributes, osAccountId, transaction);
+			transaction.commit();
+			return dataArtifact;
+		} catch (TskCoreException ex) {
+			try {
+				transaction.rollback();
+			} catch (TskCoreException ex2) {
+				LOGGER.log(Level.SEVERE, "Failed to rollback transaction after exception. "
+						+ "Error invoking newDataArtifact with dataSourceObjId: " + dataSourceObjId + ",  sourceObjId: " + sourceObjId, ex2);
+			}
+			throw ex;
+		}
+	}
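+
+	/*
+	 * A minimal creation sketch (comments only). "blackboard", "file", and
+	 * "dataSource" are assumed caller-side variables, and the module name and
+	 * URL are placeholders.
+	 *
+	 *   DataArtifact artifact = blackboard.newDataArtifact(
+	 *           BlackboardArtifact.Type.TSK_WEB_DOWNLOAD, file.getId(), dataSource.getId(),
+	 *           Arrays.asList(new BlackboardAttribute(
+	 *                   BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, "myModule", "https://example.com/file.bin")),
+	 *           null); // no associated OS account
+	 */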
+
+	/**
+	 * Add a new data artifact with the given type.
+	 *
+	 * This API executes in the context of the given transaction.
+	 *
+	 * @param artifactType    The type of the data artifact.
+	 * @param sourceObjId     The content that is the source of this artifact.
+	 * @param dataSourceObjId The data source the artifact source content
+	 *                        belongs to, may be the same as the sourceObjId.
+	 *                        May be null.
+	 * @param attributes      The attributes. May be empty or null.
+	 * @param osAccountObjId  The OS account associated with the artifact. May
+	 *                        be null.
+	 * @param transaction     The transaction in the scope of which the
+	 *                        operation is to be performed.
+	 *
+	 * @return DataArtifact A new data artifact.
+	 *
+	 * @throws TskCoreException If a critical error occurs within tsk core.
+	 */
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, long sourceObjId, Long dataSourceObjId,
+			Collection<BlackboardAttribute> attributes, Long osAccountObjId, final CaseDbTransaction transaction) throws TskCoreException {
+
+		if (artifactType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) {
+			throw new TskCoreException(String.format("Artifact type (name = %s) is not of the Data Artifact category.", artifactType.getTypeName()));
+		}
+
+		try {
+			CaseDbConnection connection = transaction.getConnection();
+			long artifact_obj_id = caseDb.addObject(sourceObjId, TskData.ObjectType.ARTIFACT.getObjectType(), connection);
+			PreparedStatement statement = caseDb.createInsertArtifactStatement(artifactType.getTypeID(), sourceObjId, artifact_obj_id, dataSourceObjId, connection);
+
+			connection.executeUpdate(statement);
+			try (ResultSet resultSet = statement.getGeneratedKeys()) {
+				resultSet.next();
+				DataArtifact dataArtifact = new DataArtifact(caseDb, resultSet.getLong(1), //last_insert_rowid()
+						sourceObjId, artifact_obj_id, dataSourceObjId, artifactType.getTypeID(),
+						artifactType.getTypeName(), artifactType.getDisplayName(), BlackboardArtifact.ReviewStatus.UNDECIDED,
+						osAccountObjId, true);
+
+				// Add a row in tsk_data_artifacts if the OS account is present
+				if (osAccountObjId != null) {
+					String insertDataArtifactSQL = "INSERT INTO tsk_data_artifacts (artifact_obj_id, os_account_obj_id) VALUES (?, ?)";
+
+					statement = connection.getPreparedStatement(insertDataArtifactSQL, Statement.NO_GENERATED_KEYS);
+					statement.clearParameters();
+
+					statement.setLong(1, artifact_obj_id);
+					statement.setLong(2, osAccountObjId);
+					connection.executeUpdate(statement);
+				}
+
+				// if attributes are provided, add them to the artifact.
+				if (Objects.nonNull(attributes) && !attributes.isEmpty()) {
+					dataArtifact.addAttributes(attributes, transaction);
+				}
+
+				return dataArtifact;
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error creating a data artifact with type id = %d, objId = %d, and data source obj id = %d", artifactType.getTypeID(), sourceObjId, dataSourceObjId), ex);
+		}
+	}
+
 	/**
 	 * Event published by SleuthkitCase when one or more artifacts are posted. A
 	 * posted artifact is complete (all attributes have been added) and ready
@@ -483,7 +1261,7 @@ private ArtifactsPostedEvent(Collection<BlackboardArtifact> artifacts, String mo
 		}
 
 		public Collection<BlackboardArtifact> getArtifacts() {
-			return artifacts;
+			return ImmutableSet.copyOf(artifacts);
 		}
 
 		public Collection<BlackboardArtifact> getArtifacts(BlackboardArtifact.Type artifactType) {
@@ -498,7 +1276,7 @@ public String getModuleName() {
 		}
 
 		public Collection<BlackboardArtifact.Type> getArtifactTypes() {
-			return artifactTypes;
+			return ImmutableSet.copyOf(artifactTypes);
 		}
 	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java b/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java
index 591f0fae8fa3af11356098f12b57cf6314f1a4f1..a1d8c218a8656ad86d496b3887db607fc857860f 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/BlackboardArtifact.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2011-2020 Basis Technology Corp.
+ * Copyright 2011-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -20,9 +20,12 @@
 
 import java.io.Serializable;
 import java.io.UnsupportedEncodingException;
+import java.sql.SQLException;
 import java.text.MessageFormat;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -30,8 +33,12 @@
 import java.util.Objects;
 import java.util.ResourceBundle;
 import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
 import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
 import org.sleuthkit.datamodel.SleuthkitCase.ObjectInfo;
 
 /**
@@ -51,7 +58,7 @@ public class BlackboardArtifact implements Content {
 	private final long artifactId;
 	private final long sourceObjId;				// refers to objID of parent/source object
 	private final long artifactObjId;			// objId of the artifact in tsk_objects. TBD: replace artifactID with this
-	private final long dataSourceObjId;			// objId of the data source in tsk_objects.
+	private final Long dataSourceObjId;			// objId of the data source in tsk_objects.
 	private final int artifactTypeId;
 	private final String artifactTypeName;
 	private final String displayName;
@@ -77,18 +84,18 @@ public class BlackboardArtifact implements Content {
 	 *
 	 * @param sleuthkitCase    The SleuthKit case (case database) that contains
 	 *                         the artifact data.
-	 * @param artifactID       The unique id for this artifact
+	 * @param artifactID       The unique id for this artifact.
 	 * @param sourceObjId      The unique id of the content with which this
 	 *                         artifact is associated.
-	 * @param artifactObjId    The unique id this artifact, in tsk_objects
+	 * @param artifactObjId    The unique id this artifact, in tsk_objects.
 	 * @param dataSourceObjId  Object ID of the datasource where the artifact
-	 *                         was found.
+	 *                         was found. May be null.
 	 * @param artifactTypeID   The type id of this artifact.
 	 * @param artifactTypeName The type name of this artifact.
 	 * @param displayName      The display name of this artifact.
 	 * @param reviewStatus     The review status of this artifact.
 	 */
-	BlackboardArtifact(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjId, long dataSourceObjId, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus) {
+	BlackboardArtifact(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjId, Long dataSourceObjId, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus) {
 
 		this.sleuthkitCase = sleuthkitCase;
 		this.artifactId = artifactID;
@@ -124,8 +131,9 @@ public class BlackboardArtifact implements Content {
 	 * @param artifactTypeName The type name of this artifact.
 	 * @param displayName      The display name of this artifact.
 	 * @param reviewStatus     The review status of this artifact.
+	 * @param isNew            If the artifact is newly created.
 	 */
-	BlackboardArtifact(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjID, long dataSourceObjID, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, boolean isNew) {
+	BlackboardArtifact(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjID, Long dataSourceObjID, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, boolean isNew) {
 		this(sleuthkitCase, artifactID, sourceObjId, artifactObjID, dataSourceObjID, artifactTypeID, artifactTypeName, displayName, reviewStatus);
 		if (isNew) {
 			/*
@@ -169,9 +177,9 @@ public long getObjectID() {
 	/**
 	 * Gets the object id of the data source for this artifact.
 	 *
-	 * @return The data source object id.
+	 * @return The data source object id, may be null.
 	 */
-	long getDataSourceObjectID() {
+	Long getDataSourceObjectID() {
 		return this.dataSourceObjId;
 	}
 
@@ -183,6 +191,20 @@ long getDataSourceObjectID() {
 	public int getArtifactTypeID() {
 		return this.artifactTypeId;
 	}
+	
+	/**
+	 * Gets the artifact type for this artifact.
+	 *
+	 * @return The artifact type.
+	 *
+	 * @throws TskCoreException If an error occurs accessing the case database.
+	 */
+	public BlackboardArtifact.Type getType() throws TskCoreException {
+		BlackboardArtifact.Type standardTypesValue = BlackboardArtifact.Type.STANDARD_TYPES.get(getArtifactTypeID());
+		if (standardTypesValue != null) {
+			return standardTypesValue;
+		} else {
+			return getSleuthkitCase().getArtifactType(getArtifactTypeID());
+		}
+	}
 
 	/**
 	 * Gets the artifact type name for this artifact.
@@ -240,7 +262,7 @@ public String getShortDescription() throws TskCoreException {
 					ATTRIBUTE_TYPE.TSK_EMAIL_FROM,
 					ATTRIBUTE_TYPE.TSK_EMAIL_TO,
 					ATTRIBUTE_TYPE.TSK_EMAIL_HOME,
-					ATTRIBUTE_TYPE.TSK_EMAIL_OFFICE}; //in the order we want to use them 
+					ATTRIBUTE_TYPE.TSK_EMAIL_OFFICE}; //in the order we want to use them
 				for (ATTRIBUTE_TYPE t : typesThatCanHaveName) {
 					attr = getAttribute(new BlackboardAttribute.Type(t));
 					if (attr != null && !attr.getDisplayString().isEmpty()) {
@@ -264,7 +286,7 @@ public String getShortDescription() throws TskCoreException {
 			ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED,
 			ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
 			ATTRIBUTE_TYPE.TSK_DATETIME_START,
-			ATTRIBUTE_TYPE.TSK_DATETIME_END};  //in the order we want to use them 
+			ATTRIBUTE_TYPE.TSK_DATETIME_END};  //in the order we want to use them
 		BlackboardAttribute date;
 		for (ATTRIBUTE_TYPE t : typesThatCanHaveDate) {
 			date = getAttribute(new BlackboardAttribute.Type(t));
@@ -384,6 +406,41 @@ public void addAttributes(Collection<BlackboardAttribute> attributes) throws Tsk
 		attrsCache.addAll(attributes);
 	}
 
+	/**
+	 * Adds a collection of attributes to this artifact in a single operation
+	 * (faster than adding each attribute individually) within a transaction
+	 * supplied by the caller.
+	 *
+	 * @param attributes        The collection of attributes.
+	 * @param caseDbTransaction The transaction in the scope of which the
+	 *                          operation is to be performed, managed by the
+	 *                          caller. Null is not permitted.
+	 *
+	 * @throws TskCoreException If an error occurs and the attributes were not
+	 *                          added to the artifact, if
+	 *                          <code>caseDbTransaction</code> is null, or if
+	 *                          <code>attributes</code> is null or empty.
+	 */
+	public void addAttributes(Collection<BlackboardAttribute> attributes, final SleuthkitCase.CaseDbTransaction caseDbTransaction) throws TskCoreException {
+
+		if (Objects.isNull(attributes) || attributes.isEmpty()) {
+			throw new TskCoreException("Illegal argument passed to addAttributes: null or empty attributes collection");
+		}
+		if (Objects.isNull(caseDbTransaction)) {
+			throw new TskCoreException("Illegal argument passed to addAttributes: null caseDbTransaction");
+		}
+		try {
+			for (final BlackboardAttribute attribute : attributes) {
+				attribute.setArtifactId(artifactId);
+				attribute.setCaseDatabase(getSleuthkitCase());
+				getSleuthkitCase().addBlackBoardAttribute(attribute, artifactTypeId, caseDbTransaction.getConnection());
+			}
+			attrsCache.addAll(attributes);
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error adding blackboard attributes", ex);
+		}
+	}
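+
+	/*
+	 * A hypothetical caller-managed-transaction sketch (comments only):
+	 * "skCase" and "artifact" are assumed caller-side variables, and the
+	 * comment attribute is a placeholder.
+	 *
+	 *   SleuthkitCase.CaseDbTransaction trans = skCase.beginTransaction();
+	 *   try {
+	 *       artifact.addAttributes(Collections.singletonList(new BlackboardAttribute(
+	 *               BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT, "myModule", "example comment")), trans);
+	 *       trans.commit();
+	 *   } catch (TskCoreException ex) {
+	 *       trans.rollback();
+	 *       throw ex;
+	 *   }
+	 */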
+
 	/**
 	 * This overiding implementation returns the unique path of the parent. It
 	 * does not include the Artifact name in the unique path.
@@ -401,7 +458,7 @@ public String getUniquePath() throws TskCoreException {
 			if (myParent != null) {
 				tempUniquePath = myParent.getUniquePath();
 			}
-			
+
 			// Don't update uniquePath until it is complete.
 			uniquePath = tempUniquePath;
 		}
@@ -437,6 +494,27 @@ public ArrayList<BlackboardArtifact> getAllArtifacts() throws TskCoreException {
 		return new ArrayList<BlackboardArtifact>();
 	}
 
+	@Override
+	public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException {
+		return sleuthkitCase.getBlackboard().getAnalysisResults(artifactObjId);
+	}
+
+	@Override
+	public List<DataArtifact> getAllDataArtifacts() throws TskCoreException {
+		return sleuthkitCase.getBlackboard().getDataArtifactsBySource(artifactObjId);
+	}
+	
+	@Override
+	public Score getAggregateScore() throws TskCoreException {
+		return sleuthkitCase.getScoringManager().getAggregateScore(artifactObjId);
+
+	}
+
+	@Override
+	public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException {
+		return sleuthkitCase.getBlackboard().getAnalysisResults(artifactObjId, artifactType.getTypeID()); //NON-NLS
+	}
+
 	/**
 	 * Get all artifacts associated with this content that have the given type
 	 * name
@@ -621,12 +699,57 @@ public Set<String> getHashSetNames() throws TskCoreException {
 	 *         looked up from this)
 	 *
 	 * @throws TskCoreException if critical error occurred within tsk core
+	 * @deprecated Use the Blackboard to create Data Artifacts and Analysis Results.
 	 */
+	@Deprecated
 	@Override
 	public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
 		throw new TskCoreException("Cannot create artifact of an artifact. Not supported.");
 	}
 
+	@Override
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
+		CaseDbTransaction trans = sleuthkitCase.beginTransaction();
+		try {
+			AnalysisResultAdded resultAdded = sleuthkitCase.getBlackboard().newAnalysisResult(artifactType, this.getObjectID(), this.getDataSource().getId(), score, conclusion, configuration, justification, attributesList, trans);
+
+			trans.commit();
+			return resultAdded;
+		} catch (BlackboardException ex) {
+			trans.rollback();
+			throw new TskCoreException("Error adding analysis result.", ex);
+		}
+	}
+
+	@Override
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList, long dataSourceId) throws TskCoreException {
+		CaseDbTransaction trans = sleuthkitCase.beginTransaction();
+		try {
+			AnalysisResultAdded resultAdded = sleuthkitCase.getBlackboard().newAnalysisResult(artifactType, this.getObjectID(), dataSourceId, score, conclusion, configuration, justification, attributesList, trans);
+
+			trans.commit();
+			return resultAdded;
+		} catch (BlackboardException ex) {
+			trans.rollback();
+			throw new TskCoreException("Error adding analysis result.", ex);
+		}
+	}
+
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
+		throw new TskCoreException("Cannot create data artifact of an artifact. Not supported.");
+	}
+	
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId, long dataSourceId) throws TskCoreException {
+		throw new TskCoreException("Cannot create data artifact of an artifact. Not supported.");
+	}
+	
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
+		return newDataArtifact(artifactType, attributesList, null);
+	}
+
 	/**
 	 * Create and add an artifact associated with this content to the blackboard
 	 *
@@ -636,7 +759,9 @@ public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreExceptio
 	 *         looked up from this)
 	 *
 	 * @throws TskCoreException if critical error occurred within tsk core
+	 * @deprecated Use the Blackboard to create Data Artifacts and Analysis Results.
 	 */
+	@Deprecated
 	@Override
 	public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
 		throw new TskCoreException("Cannot create artifact of an artifact. Not supported.");
@@ -777,7 +902,7 @@ public String getName() {
 
 	@Override
 	public Content getDataSource() throws TskCoreException {
-		return getSleuthkitCase().getContentById(dataSourceObjId);
+		return dataSourceObjId != null ? getSleuthkitCase().getContentById(dataSourceObjId) : null;
 	}
 
 	/**
@@ -824,21 +949,431 @@ private void loadArtifactContent() throws TskCoreException {
 	public static final class Type implements Serializable {
 
 		private static final long serialVersionUID = 1L;
+
+		/**
+		 * A generic information artifact.
+		 */
+		public static final Type TSK_GEN_INFO = new BlackboardArtifact.Type(1, "TSK_GEN_INFO", bundle.getString("BlackboardArtifact.tskGenInfo.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A Web bookmark. Use methods in
+		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
+		 * create bookmark artifacts.
+		 */
+		public static final Type TSK_WEB_BOOKMARK = new BlackboardArtifact.Type(2, "TSK_WEB_BOOKMARK", bundle.getString("BlackboardArtifact.tskWebBookmark.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A Web cookie. Use methods in
+		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
+		 * create cookie artifacts.
+		 */
+		public static final Type TSK_WEB_COOKIE = new BlackboardArtifact.Type(3, "TSK_WEB_COOKIE", bundle.getString("BlackboardArtifact.tskWebCookie.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A Web history. Use methods in
+		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
+		 * create history artifacts.
+		 */
+		public static final Type TSK_WEB_HISTORY = new BlackboardArtifact.Type(4, "TSK_WEB_HISTORY", bundle.getString("BlackboardArtifact.tskWebHistory.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A Web download. Use methods in
+		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
+		 * create download artifacts.
+		 */
+		public static final Type TSK_WEB_DOWNLOAD = new BlackboardArtifact.Type(5, "TSK_WEB_DOWNLOAD", bundle.getString("BlackboardArtifact.tskWebDownload.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A recent object.
+		 */
+		public static final Type TSK_RECENT_OBJECT = new BlackboardArtifact.Type(6, "TSK_RECENT_OBJ", bundle.getString("BlackboardArtifact.tsk.recentObject.text"), Category.DATA_ARTIFACT);
+
+		// 7 was used for deprecated TSK_GPS_TRACKPOINT. 
+		/**
+		 * An installed program.
+		 */
+		public static final Type TSK_INSTALLED_PROG = new BlackboardArtifact.Type(8, "TSK_INSTALLED_PROG", bundle.getString("BlackboardArtifact.tskInstalledProg.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A search hit for a keyword.
+		 */
+		public static final Type TSK_KEYWORD_HIT = new BlackboardArtifact.Type(9, "TSK_KEYWORD_HIT", bundle.getString("BlackboardArtifact.tskKeywordHits.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * A hit for a hash set (hash database).
+		 */
+		public static final Type TSK_HASHSET_HIT = new BlackboardArtifact.Type(10, "TSK_HASHSET_HIT", bundle.getString("BlackboardArtifact.tskHashsetHit.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * An attached device.
+		 */
+		public static final Type TSK_DEVICE_ATTACHED = new BlackboardArtifact.Type(11, "TSK_DEVICE_ATTACHED", bundle.getString("BlackboardArtifact.tskDeviceAttached.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A meta-artifact to call attention to a file deemed to be
+		 * interesting.
+		 */
+		public static final Type TSK_INTERESTING_FILE_HIT = new BlackboardArtifact.Type(12, "TSK_INTERESTING_FILE_HIT", bundle.getString("BlackboardArtifact.tskInterestingFileHit.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * An email message.
+		 */
+		public static final Type TSK_EMAIL_MSG = new BlackboardArtifact.Type(13, "TSK_EMAIL_MSG", bundle.getString("BlackboardArtifact.tskEmailMsg.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Text extracted from the source content.
+		 */
+		public static final Type TSK_EXTRACTED_TEXT = new BlackboardArtifact.Type(14, "TSK_EXTRACTED_TEXT", bundle.getString("BlackboardArtifact.tskExtractedText.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A Web search engine query extracted from Web history.
+		 */
+		public static final Type TSK_WEB_SEARCH_QUERY = new BlackboardArtifact.Type(15, "TSK_WEB_SEARCH_QUERY", bundle.getString("BlackboardArtifact.tskWebSearchQuery.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * EXIF metadata.
+		 */
+		public static final Type TSK_METADATA_EXIF = new BlackboardArtifact.Type(16, "TSK_METADATA_EXIF", bundle.getString("BlackboardArtifact.tskMetadataExif.text"), Category.ANALYSIS_RESULT);
+
+		// 17 was used for deprecated TSK_TAG_FILE. 
+		// 18 was used for deprecated TSK_TAG_ARTIFACT. 
+		/**
+		 * Information pertaining to an operating system.
+		 */
+		public static final Type TSK_OS_INFO = new BlackboardArtifact.Type(19, "TSK_OS_INFO", bundle.getString("BlackboardArtifact.tskOsInfo.text"), Category.DATA_ARTIFACT);
+
+		// 20 was used for deprecated TSK_OS_ACCOUNT.
+		/**
+		 * An application or Web service account.
+		 */
+		public static final Type TSK_SERVICE_ACCOUNT = new BlackboardArtifact.Type(21, "TSK_SERVICE_ACCOUNT", bundle.getString("BlackboardArtifact.tskServiceAccount.text"), Category.DATA_ARTIFACT);
+
+		// 22 was used for deprecated TSK_TOOL_OUTPUT.
+		/**
+		 * A contact extracted from a phone, or from an address
+		 * book/email/messaging application. Use methods in
+		 * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper
+		 * to create contact artifacts.
+		 */
+		public static final Type TSK_CONTACT = new BlackboardArtifact.Type(23, "TSK_CONTACT", bundle.getString("BlackboardArtifact.tskContact.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * An SMS/MMS message extracted from phone, or from another messaging
+		 * application, like IM. Use methods in
+		 * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper
+		 * to create message artifacts.
+		 */
+		public static final Type TSK_MESSAGE = new BlackboardArtifact.Type(24, "TSK_MESSAGE", bundle.getString("BlackboardArtifact.tskMessage.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A phone call log extracted from a phone or softphone application. Use
+		 * methods in
+		 * org.sleuthkit.datamodel.blackboardutils.CommunicationArtifactsHelper
+		 * to create call log artifacts.
+		 */
+		public static final Type TSK_CALLLOG = new BlackboardArtifact.Type(25, "TSK_CALLLOG", bundle.getString("BlackboardArtifact.tskCalllog.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A calendar entry from a phone, PIM, or a calendar application.
+		 */
+		public static final Type TSK_CALENDAR_ENTRY = new BlackboardArtifact.Type(26, "TSK_CALENDAR_ENTRY", bundle.getString("BlackboardArtifact.tskCalendarEntry.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A speed dial entry from a phone.
+		 */
+		public static final Type TSK_SPEED_DIAL_ENTRY = new BlackboardArtifact.Type(27, "TSK_SPEED_DIAL_ENTRY", bundle.getString("BlackboardArtifact.tskSpeedDialEntry.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A bluetooth pairing entry.
+		 */
+		public static final Type TSK_BLUETOOTH_PAIRING = new BlackboardArtifact.Type(28, "TSK_BLUETOOTH_PAIRING", bundle.getString("BlackboardArtifact.tskBluetoothPairing.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A GPS bookmark / way point that the user saved.
+		 */
+		public static final Type TSK_GPS_BOOKMARK = new BlackboardArtifact.Type(29, "TSK_GPS_BOOKMARK", bundle.getString("BlackboardArtifact.tskGpsBookmark.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A GPS last known location record.
+		 */
+		public static final Type TSK_GPS_LAST_KNOWN_LOCATION = new BlackboardArtifact.Type(30, "TSK_GPS_LAST_KNOWN_LOCATION", bundle.getString("BlackboardArtifact.tskGpsLastKnownLocation.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A GPS search record.
+		 */
+		public static final Type TSK_GPS_SEARCH = new BlackboardArtifact.Type(31, "TSK_GPS_SEARCH", bundle.getString("BlackboardArtifact.tskGpsSearch.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Application run information.
+		 */
+		public static final Type TSK_PROG_RUN = new BlackboardArtifact.Type(32, "TSK_PROG_RUN", bundle.getString("BlackboardArtifact.tskProgRun.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * An encrypted file.
+		 */
+		public static final Type TSK_ENCRYPTION_DETECTED = new BlackboardArtifact.Type(33, "TSK_ENCRYPTION_DETECTED", bundle.getString("BlackboardArtifact.tskEncryptionDetected.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * A file with an extension that does not match its MIME type.
+		 */
+		public static final Type TSK_EXT_MISMATCH_DETECTED = new BlackboardArtifact.Type(34, "TSK_EXT_MISMATCH_DETECTED", bundle.getString("BlackboardArtifact.tskExtMismatchDetected.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * A meta-artifact to call attention to an artifact deemed to be
+		 * interesting.
+		 */
+		public static final Type TSK_INTERESTING_ARTIFACT_HIT = new BlackboardArtifact.Type(35, "TSK_INTERESTING_ARTIFACT_HIT", bundle.getString("BlackboardArtifact.tskInterestingArtifactHit.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * A route based on GPS coordinates. Use
+		 * org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper.addRoute()
+		 * to create route artifacts.
+		 */
+		public static final Type TSK_GPS_ROUTE = new BlackboardArtifact.Type(36, "TSK_GPS_ROUTE", bundle.getString("BlackboardArtifact.tskGpsRoute.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A remote drive.
+		 */
+		public static final Type TSK_REMOTE_DRIVE = new BlackboardArtifact.Type(37, "TSK_REMOTE_DRIVE", bundle.getString("BlackboardArtifact.tskRemoteDrive.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A human face was detected in a media file.
+		 */
+		public static final Type TSK_FACE_DETECTED = new BlackboardArtifact.Type(38, "TSK_FACE_DETECTED", bundle.getString("BlackboardArtifact.tskFaceDetected.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * An account.
+		 */
+		public static final Type TSK_ACCOUNT = new BlackboardArtifact.Type(39, "TSK_ACCOUNT", bundle.getString("BlackboardArtifact.tskAccount.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A file suspected of being encrypted.
+		 */
+		public static final Type TSK_ENCRYPTION_SUSPECTED = new BlackboardArtifact.Type(40, "TSK_ENCRYPTION_SUSPECTED", bundle.getString("BlackboardArtifact.tskEncryptionSuspected.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * A classifier detected an object in a media file.
+		 */
+		public static final Type TSK_OBJECT_DETECTED = new BlackboardArtifact.Type(41, "TSK_OBJECT_DETECTED", bundle.getString("BlackboardArtifact.tskObjectDetected.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * A wireless network.
+		 */
+		public static final Type TSK_WIFI_NETWORK = new BlackboardArtifact.Type(42, "TSK_WIFI_NETWORK", bundle.getString("BlackboardArtifact.tskWIFINetwork.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Information related to a device.
+		 */
+		public static final Type TSK_DEVICE_INFO = new BlackboardArtifact.Type(43, "TSK_DEVICE_INFO", bundle.getString("BlackboardArtifact.tskDeviceInfo.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A SIM card.
+		 */
+		public static final Type TSK_SIM_ATTACHED = new BlackboardArtifact.Type(44, "TSK_SIM_ATTACHED", bundle.getString("BlackboardArtifact.tskSimAttached.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A bluetooth adapter.
+		 */
+		public static final Type TSK_BLUETOOTH_ADAPTER = new BlackboardArtifact.Type(45, "TSK_BLUETOOTH_ADAPTER", bundle.getString("BlackboardArtifact.tskBluetoothAdapter.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A wireless network adapter.
+		 */
+		public static final Type TSK_WIFI_NETWORK_ADAPTER = new BlackboardArtifact.Type(46, "TSK_WIFI_NETWORK_ADAPTER", bundle.getString("BlackboardArtifact.tskWIFINetworkAdapter.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Indicates a verification failure
+		 */
+		public static final Type TSK_VERIFICATION_FAILED = new BlackboardArtifact.Type(47, "TSK_VERIFICATION_FAILED", bundle.getString("BlackboardArtifact.tskVerificationFailed.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * Categorization information for a data source.
+		 */
+		public static final Type TSK_DATA_SOURCE_USAGE = new BlackboardArtifact.Type(48, "TSK_DATA_SOURCE_USAGE", bundle.getString("BlackboardArtifact.tskDataSourceUsage.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * Indicates auto fill data from a Web form. Use methods in
+		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
+		 * create web form autofill artifacts.
+		 */
+		public static final Type TSK_WEB_FORM_AUTOFILL = new BlackboardArtifact.Type(49, "TSK_WEB_FORM_AUTOFILL", bundle.getString("BlackboardArtifact.tskWebFormAutofill.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Indicates a person's address filled in a web form. Use methods in
+		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
+		 * create web form address artifacts.
+		 */
+		public static final Type TSK_WEB_FORM_ADDRESS = new BlackboardArtifact.Type(50, "TSK_WEB_FORM_ADDRESSES ", bundle.getString("BlackboardArtifact.tskWebFormAddresses.text"), Category.DATA_ARTIFACT);
+
+		// 51 was used for deprecated TSK_DOWNLOAD_SOURCE
+		/**
+		 * Indicates web cache data
+		 */
+		public static final Type TSK_WEB_CACHE = new BlackboardArtifact.Type(52, "TSK_WEB_CACHE", bundle.getString("BlackboardArtifact.tskWebCache.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * A generic (timeline) event.
+		 */
+		public static final Type TSK_TL_EVENT = new BlackboardArtifact.Type(53, "TSK_TL_EVENT", bundle.getString("BlackboardArtifact.tskTLEvent.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Indicates clipboard content
+		 */
+		public static final Type TSK_CLIPBOARD_CONTENT = new BlackboardArtifact.Type(54, "TSK_CLIPBOARD_CONTENT", bundle.getString("BlackboardArtifact.tskClipboardContent.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * An associated object.
+		 */
+		public static final Type TSK_ASSOCIATED_OBJECT = new BlackboardArtifact.Type(55, "TSK_ASSOCIATED_OBJECT", bundle.getString("BlackboardArtifact.tskAssociatedObject.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Indicates a file may have been created by the user.
+		 */
+		public static final Type TSK_USER_CONTENT_SUSPECTED = new BlackboardArtifact.Type(56, "TSK_USER_CONTENT_SUSPECTED", bundle.getString("BlackboardArtifact.tskUserContentSuspected.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * Stores metadata about an object.
+		 */
+		public static final Type TSK_METADATA = new BlackboardArtifact.Type(57, "TSK_METADATA", bundle.getString("BlackboardArtifact.tskMetadata.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Stores a GPS track log. Use
+		 * org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper.addTrack()
+		 * to create track artifacts.
+		 */
+		public static final Type TSK_GPS_TRACK = new BlackboardArtifact.Type(58, "TSK_GPS_TRACK", bundle.getString("BlackboardArtifact.tskTrack.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Stores a role on a given domain.
+		 */
+		public static final Type TSK_WEB_ACCOUNT_TYPE = new BlackboardArtifact.Type(59, "TSK_WEB_ACCOUNT_TYPE", bundle.getString("BlackboardArtifact.tskWebAccountType.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * Screenshots from a device or an application.
+		 */
+		public static final Type TSK_SCREEN_SHOTS = new BlackboardArtifact.Type(60, "TSK_SCREEN_SHOTS", bundle.getString("BlackboardArtifact.tskScreenShots.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Notifications sent to the user.
+		 */
+		public static final Type TSK_PROG_NOTIFICATIONS = new BlackboardArtifact.Type(62, "TSK_PROG_NOTIFICATIONS", bundle.getString("BlackboardArtifact.tskProgNotifications.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * System/Application/File backup.
+		 */
+		public static final Type TSK_BACKUP_EVENT = new BlackboardArtifact.Type(63, "TSK_BACKUP_EVENT", bundle.getString("BlackboardArtifact.tskBackupEvent.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Programs that have been deleted.
+		 */
+		public static final Type TSK_DELETED_PROG = new BlackboardArtifact.Type(64, "TSK_DELETED_PROG", bundle.getString("BlackboardArtifact.tskDeletedProg.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Activity on the system or an application.
+		 */
+		public static final Type TSK_USER_DEVICE_EVENT = new BlackboardArtifact.Type(65, "TSK_USER_DEVICE_EVENT", bundle.getString("BlackboardArtifact.tskUserDeviceEvent.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Indicates that the file had a YARA pattern match hit.
+		 */
+		public static final Type TSK_YARA_HIT = new BlackboardArtifact.Type(66, "TSK_YARA_HIT", bundle.getString("BlackboardArtifact.tskYaraHit.text"), Category.ANALYSIS_RESULT);
+
+		/**
+		 * Stores the outline of an area using GPS coordinates.
+		 */
+		public static final Type TSK_GPS_AREA = new BlackboardArtifact.Type(67, "TSK_GPS_AREA", bundle.getString("BlackboardArtifact.tskGPSArea.text"), Category.DATA_ARTIFACT);
+
+		/**
+		 * Defines a category for a particular domain.
+		 */
+		public static final Type TSK_WEB_CATEGORIZATION = new BlackboardArtifact.Type(68, "TSK_WEB_CATEGORIZATION", bundle.getString("BlackboardArtifact.tskWebCategorization.text"), Category.ANALYSIS_RESULT);
+
+		// NOTE: When adding a new standard BlackboardArtifact.Type, add the instance and then add to the STANDARD_TYPES map.
+		/**
+		 * All standard artifact types, keyed by type id.
+		 */
+		static final Map<Integer, Type> STANDARD_TYPES = Collections.unmodifiableMap(Stream.of(
+				TSK_GEN_INFO,
+				TSK_WEB_BOOKMARK,
+				TSK_WEB_COOKIE,
+				TSK_WEB_HISTORY,
+				TSK_WEB_DOWNLOAD,
+				TSK_RECENT_OBJECT,
+				TSK_INSTALLED_PROG,
+				TSK_KEYWORD_HIT,
+				TSK_HASHSET_HIT,
+				TSK_DEVICE_ATTACHED,
+				TSK_INTERESTING_FILE_HIT,
+				TSK_EMAIL_MSG,
+				TSK_EXTRACTED_TEXT,
+				TSK_WEB_SEARCH_QUERY,
+				TSK_METADATA_EXIF,
+				TSK_OS_INFO,
+				TSK_SERVICE_ACCOUNT,
+				TSK_CONTACT,
+				TSK_MESSAGE,
+				TSK_CALLLOG,
+				TSK_CALENDAR_ENTRY,
+				TSK_SPEED_DIAL_ENTRY,
+				TSK_BLUETOOTH_PAIRING,
+				TSK_GPS_BOOKMARK,
+				TSK_GPS_LAST_KNOWN_LOCATION,
+				TSK_GPS_SEARCH,
+				TSK_PROG_RUN,
+				TSK_ENCRYPTION_DETECTED,
+				TSK_EXT_MISMATCH_DETECTED,
+				TSK_INTERESTING_ARTIFACT_HIT,
+				TSK_GPS_ROUTE,
+				TSK_REMOTE_DRIVE,
+				TSK_FACE_DETECTED,
+				TSK_ACCOUNT,
+				TSK_ENCRYPTION_SUSPECTED,
+				TSK_OBJECT_DETECTED,
+				TSK_WIFI_NETWORK,
+				TSK_DEVICE_INFO,
+				TSK_SIM_ATTACHED,
+				TSK_BLUETOOTH_ADAPTER,
+				TSK_WIFI_NETWORK_ADAPTER,
+				TSK_VERIFICATION_FAILED,
+				TSK_DATA_SOURCE_USAGE,
+				TSK_WEB_FORM_AUTOFILL,
+				TSK_WEB_FORM_ADDRESS,
+				TSK_WEB_CACHE,
+				TSK_TL_EVENT,
+				TSK_CLIPBOARD_CONTENT,
+				TSK_ASSOCIATED_OBJECT,
+				TSK_USER_CONTENT_SUSPECTED,
+				TSK_METADATA,
+				TSK_GPS_TRACK,
+				TSK_WEB_ACCOUNT_TYPE,
+				TSK_SCREEN_SHOTS,
+				TSK_PROG_NOTIFICATIONS,
+				TSK_BACKUP_EVENT,
+				TSK_DELETED_PROG,
+				TSK_USER_DEVICE_EVENT,
+				TSK_YARA_HIT,
+				TSK_GPS_AREA,
+				TSK_WEB_CATEGORIZATION
+		).collect(Collectors.toMap(type -> type.getTypeID(), type -> type)));
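+		// Illustrative sketch only (the surrounding code is assumed, not part of
+		// this change): within this package, a standard type can be looked up by
+		// its id through the map above, e.g.
+		//   Type message = STANDARD_TYPES.get(24); // TSK_MESSAGE, or null for a non-standard id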
+
 		private final String typeName;
 		private final int typeID;
 		private final String displayName;
-
+		private final Category category;
+
 		/**
 		 * Constructs a custom artifact type.
 		 *
 		 * @param typeName    The name of the type.
 		 * @param typeID      The id of the type.
 		 * @param displayName The display name of the type.
+		 * @param category    The artifact type category.
 		 */
-		public Type(int typeID, String typeName, String displayName) {
+		Type(int typeID, String typeName, String displayName, Category category) {
 			this.typeID = typeID;
 			this.typeName = typeName;
 			this.displayName = displayName;
+			this.category = category;
 		}
 
 		/**
@@ -847,7 +1382,7 @@ public Type(int typeID, String typeName, String displayName) {
 		 * @param type An element of the ARTIFACT_TYPE enum.
 		 */
 		public Type(ARTIFACT_TYPE type) {
-			this(type.getTypeID(), type.getLabel(), type.getDisplayName());
+			this(type.getTypeID(), type.getLabel(), type.getDisplayName(), type.getCategory());
 		}
 
 		/**
@@ -877,6 +1412,15 @@ public String getDisplayName() {
 			return this.displayName;
 		}
 
+		/**
+		 * Gets category of this artifact type.
+		 *
+		 * @return The artifact type category.
+		 */
+		public Category getCategory() {
+			return category;
+		}
+
 		/**
 		 * Tests this artifact type for equality with another object.
 		 *
@@ -934,40 +1478,40 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 * A generic information artifact.
 		 */
 		TSK_GEN_INFO(1, "TSK_GEN_INFO", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskGenInfo.text")),
+				bundle.getString("BlackboardArtifact.tskGenInfo.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A Web bookmark. Use methods in
 		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 		 * create bookmark artifacts.
 		 */
 		TSK_WEB_BOOKMARK(2, "TSK_WEB_BOOKMARK", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWebBookmark.text")),
+				bundle.getString("BlackboardArtifact.tskWebBookmark.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A Web cookie. Use methods in
 		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 		 * create cookie artifacts.
 		 */
 		TSK_WEB_COOKIE(3, "TSK_WEB_COOKIE",
-				bundle.getString("BlackboardArtifact.tskWebCookie.text")), //NON-NLS				
+				bundle.getString("BlackboardArtifact.tskWebCookie.text"), Category.DATA_ARTIFACT), //NON-NLS
 		/**
 		 * A Web history. Use methods in
 		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 		 * create history artifacts.
 		 */
 		TSK_WEB_HISTORY(4, "TSK_WEB_HISTORY", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWebHistory.text")),
+				bundle.getString("BlackboardArtifact.tskWebHistory.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A Web download. Use methods in
 		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 		 * create download artifacts.
 		 */
 		TSK_WEB_DOWNLOAD(5, "TSK_WEB_DOWNLOAD", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWebDownload.text")),
+				bundle.getString("BlackboardArtifact.tskWebDownload.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A recent object.
 		 */
 		TSK_RECENT_OBJECT(6, "TSK_RECENT_OBJ", //NON-NLS
-				bundle.getString("BlackboardArtifact.tsk.recentObject.text")),
+				bundle.getString("BlackboardArtifact.tsk.recentObject.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A GPS track point (geolocation data).
 		 *
@@ -975,53 +1519,53 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 */
 		@Deprecated
 		TSK_GPS_TRACKPOINT(7, "TSK_GPS_TRACKPOINT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskGpsTrackpoint.text")),
+				bundle.getString("BlackboardArtifact.tskGpsTrackpoint.text"), Category.DATA_ARTIFACT),
 		/**
 		 * An installed program.
 		 */
 		TSK_INSTALLED_PROG(8, "TSK_INSTALLED_PROG", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskInstalledProg.text")),
+				bundle.getString("BlackboardArtifact.tskInstalledProg.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A search hit for a keyword.
 		 */
 		TSK_KEYWORD_HIT(9, "TSK_KEYWORD_HIT",
-				bundle.getString("BlackboardArtifact.tskKeywordHits.text")),
+				bundle.getString("BlackboardArtifact.tskKeywordHits.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * A hit for a hash set (hash database).
 		 */
 		TSK_HASHSET_HIT(10, "TSK_HASHSET_HIT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskHashsetHit.text")),
+				bundle.getString("BlackboardArtifact.tskHashsetHit.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * An attached device.
 		 */
 		TSK_DEVICE_ATTACHED(11, "TSK_DEVICE_ATTACHED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskDeviceAttached.text")),
+				bundle.getString("BlackboardArtifact.tskDeviceAttached.text"), Category.DATA_ARTIFACT),
 		/**
 		 * An meta-artifact to call attention to a file deemed to be
 		 * interesting.
 		 */
 		TSK_INTERESTING_FILE_HIT(12, "TSK_INTERESTING_FILE_HIT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskInterestingFileHit.text")), ///< an interesting/notable file hit
+				bundle.getString("BlackboardArtifact.tskInterestingFileHit.text"), Category.ANALYSIS_RESULT), ///< an interesting/notable file hit
 		/**
 		 * An email message.
 		 */
 		TSK_EMAIL_MSG(13, "TSK_EMAIL_MSG", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskEmailMsg.text")),
+				bundle.getString("BlackboardArtifact.tskEmailMsg.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Text extracted from the source content.
 		 */
 		TSK_EXTRACTED_TEXT(14, "TSK_EXTRACTED_TEXT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskExtractedText.text")),
+				bundle.getString("BlackboardArtifact.tskExtractedText.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A Web search engine query extracted from Web history.
 		 */
 		TSK_WEB_SEARCH_QUERY(15, "TSK_WEB_SEARCH_QUERY", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWebSearchQuery.text")),
+				bundle.getString("BlackboardArtifact.tskWebSearchQuery.text"), Category.DATA_ARTIFACT),
 		/**
 		 * EXIF metadata.
 		 */
 		TSK_METADATA_EXIF(16, "TSK_METADATA_EXIF", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskMetadataExif.text")),
+				bundle.getString("BlackboardArtifact.tskMetadataExif.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * A tag applied to a file.
 		 *
@@ -1029,7 +1573,7 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 */
 		@Deprecated
 		TSK_TAG_FILE(17, "TSK_TAG_FILE", //NON-NLS
-				bundle.getString("BlackboardArtifact.tagFile.text")),
+				bundle.getString("BlackboardArtifact.tagFile.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * A tag applied to an artifact.
 		 *
@@ -1037,22 +1581,23 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 */
 		@Deprecated
 		TSK_TAG_ARTIFACT(18, "TSK_TAG_ARTIFACT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskTagArtifact.text")),
+				bundle.getString("BlackboardArtifact.tskTagArtifact.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * Information pertaining to an operating system.
 		 */
 		TSK_OS_INFO(19, "TSK_OS_INFO", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskOsInfo.text")),
+				bundle.getString("BlackboardArtifact.tskOsInfo.text"), Category.DATA_ARTIFACT),
 		/**
 		 * An operating system user account.
 		 */
+		@Deprecated
 		TSK_OS_ACCOUNT(20, "TSK_OS_ACCOUNT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskOsAccount.text")),
+				bundle.getString("BlackboardArtifact.tskOsAccount.text"), Category.DATA_ARTIFACT),
 		/**
 		 * An application or Web service account.
 		 */
 		TSK_SERVICE_ACCOUNT(21, "TSK_SERVICE_ACCOUNT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskServiceAccount.text")),
+				bundle.getString("BlackboardArtifact.tskServiceAccount.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Output from an external tool or module (raw text).
 		 *
@@ -1060,7 +1605,7 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 */
 		@Deprecated
 		TSK_TOOL_OUTPUT(22, "TSK_TOOL_OUTPUT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskToolOutput.text")),
+				bundle.getString("BlackboardArtifact.tskToolOutput.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A contact extracted from a phone, or from an address
 		 * book/email/messaging application. Use methods in
@@ -1068,7 +1613,7 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 * to create contact artifacts.
 		 */
 		TSK_CONTACT(23, "TSK_CONTACT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskContact.text")),
+				bundle.getString("BlackboardArtifact.tskContact.text"), Category.DATA_ARTIFACT),
 		/**
 		 * An SMS/MMS message extracted from phone, or from another messaging
 		 * application, like IM. Use methods in
@@ -1076,7 +1621,7 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 * to create message artifacts.
 		 */
 		TSK_MESSAGE(24, "TSK_MESSAGE", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskMessage.text")),
+				bundle.getString("BlackboardArtifact.tskMessage.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A phone call log extracted from a phone or softphone application. Use
 		 * methods in
@@ -1084,139 +1629,139 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 * to create call log artifacts.
 		 */
 		TSK_CALLLOG(25, "TSK_CALLLOG", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskCalllog.text")),
+				bundle.getString("BlackboardArtifact.tskCalllog.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A calendar entry from a phone, PIM, or a calendar application.
 		 */
 		TSK_CALENDAR_ENTRY(26, "TSK_CALENDAR_ENTRY", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskCalendarEntry.text")),
+				bundle.getString("BlackboardArtifact.tskCalendarEntry.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A speed dial entry from a phone.
 		 */
 		TSK_SPEED_DIAL_ENTRY(27, "TSK_SPEED_DIAL_ENTRY", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskSpeedDialEntry.text")),
+				bundle.getString("BlackboardArtifact.tskSpeedDialEntry.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A bluetooth pairing entry.
 		 */
 		TSK_BLUETOOTH_PAIRING(28, "TSK_BLUETOOTH_PAIRING", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskBluetoothPairing.text")),
+				bundle.getString("BlackboardArtifact.tskBluetoothPairing.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A GPS bookmark / way point that the user saved.
 		 */
 		TSK_GPS_BOOKMARK(29, "TSK_GPS_BOOKMARK", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskGpsBookmark.text")),
+				bundle.getString("BlackboardArtifact.tskGpsBookmark.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A GPS last known location record.
 		 */
 		TSK_GPS_LAST_KNOWN_LOCATION(30, "TSK_GPS_LAST_KNOWN_LOCATION", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskGpsLastKnownLocation.text")),
+				bundle.getString("BlackboardArtifact.tskGpsLastKnownLocation.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A GPS search record.
 		 */
 		TSK_GPS_SEARCH(31, "TSK_GPS_SEARCH", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskGpsSearch.text")),
+				bundle.getString("BlackboardArtifact.tskGpsSearch.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Application run information.
 		 */
 		TSK_PROG_RUN(32, "TSK_PROG_RUN", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskProgRun.text")),
+				bundle.getString("BlackboardArtifact.tskProgRun.text"), Category.DATA_ARTIFACT),
 		/**
 		 * An encrypted file.
 		 */
 		TSK_ENCRYPTION_DETECTED(33, "TSK_ENCRYPTION_DETECTED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskEncryptionDetected.text")),
+				bundle.getString("BlackboardArtifact.tskEncryptionDetected.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * A file with an extension that does not match its MIME type.
 		 */
 		TSK_EXT_MISMATCH_DETECTED(34, "TSK_EXT_MISMATCH_DETECTED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskExtMismatchDetected.text")),
+				bundle.getString("BlackboardArtifact.tskExtMismatchDetected.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * An meta-artifact to call attention to an artifact deemed to be
 		 * interesting.
 		 */
 		TSK_INTERESTING_ARTIFACT_HIT(35, "TSK_INTERESTING_ARTIFACT_HIT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskInterestingArtifactHit.text")),
+				bundle.getString("BlackboardArtifact.tskInterestingArtifactHit.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * A route based on GPS coordinates. Use
 		 * org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper.addRoute()
 		 * to create route artifacts.
 		 */
 		TSK_GPS_ROUTE(36, "TSK_GPS_ROUTE", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskGpsRoute.text")),
+				bundle.getString("BlackboardArtifact.tskGpsRoute.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A remote drive.
 		 */
 		TSK_REMOTE_DRIVE(37, "TSK_REMOTE_DRIVE", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskRemoteDrive.text")),
+				bundle.getString("BlackboardArtifact.tskRemoteDrive.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A human face was detected in a media file.
 		 */
 		TSK_FACE_DETECTED(38, "TSK_FACE_DETECTED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskFaceDetected.text")),
+				bundle.getString("BlackboardArtifact.tskFaceDetected.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * An account.
 		 */
 		TSK_ACCOUNT(39, "TSK_ACCOUNT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskAccount.text")),
+				bundle.getString("BlackboardArtifact.tskAccount.text"), Category.DATA_ARTIFACT),
 		/**
 		 * An encrypted file.
 		 */
 		TSK_ENCRYPTION_SUSPECTED(40, "TSK_ENCRYPTION_SUSPECTED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskEncryptionSuspected.text")),
+				bundle.getString("BlackboardArtifact.tskEncryptionSuspected.text"), Category.ANALYSIS_RESULT),
 		/*
 		 * A classifier detected an object in a media file.
 		 */
 		TSK_OBJECT_DETECTED(41, "TSK_OBJECT_DETECTED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskObjectDetected.text")),
+				bundle.getString("BlackboardArtifact.tskObjectDetected.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * A wireless network.
 		 */
 		TSK_WIFI_NETWORK(42, "TSK_WIFI_NETWORK", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWIFINetwork.text")),
+				bundle.getString("BlackboardArtifact.tskWIFINetwork.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Information related to a device.
 		 */
 		TSK_DEVICE_INFO(43, "TSK_DEVICE_INFO", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskDeviceInfo.text")),
+				bundle.getString("BlackboardArtifact.tskDeviceInfo.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A SIM card.
 		 */
 		TSK_SIM_ATTACHED(44, "TSK_SIM_ATTACHED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskSimAttached.text")),
+				bundle.getString("BlackboardArtifact.tskSimAttached.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A bluetooth adapter.
 		 */
 		TSK_BLUETOOTH_ADAPTER(45, "TSK_BLUETOOTH_ADAPTER", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskBluetoothAdapter.text")),
+				bundle.getString("BlackboardArtifact.tskBluetoothAdapter.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A wireless network adapter.
 		 */
 		TSK_WIFI_NETWORK_ADAPTER(46, "TSK_WIFI_NETWORK_ADAPTER", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWIFINetworkAdapter.text")),
+				bundle.getString("BlackboardArtifact.tskWIFINetworkAdapter.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Indicates a verification failure
 		 */
 		TSK_VERIFICATION_FAILED(47, "TSK_VERIFICATION_FAILED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskVerificationFailed.text")),
+				bundle.getString("BlackboardArtifact.tskVerificationFailed.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * Categorization information for a data source.
 		 */
 		TSK_DATA_SOURCE_USAGE(48, "TSK_DATA_SOURCE_USAGE", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskDataSourceUsage.text")),
+				bundle.getString("BlackboardArtifact.tskDataSourceUsage.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * Indicates auto fill data from a Web form. Use methods in
 		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 		 * create web form autofill artifacts.
 		 */
 		TSK_WEB_FORM_AUTOFILL(49, "TSK_WEB_FORM_AUTOFILL", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWebFormAutofill.text")),
+				bundle.getString("BlackboardArtifact.tskWebFormAutofill.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Indicates an person's address filled in a web form. Use methods in
 		 * org.sleuthkit.datamodel.blackboardutils.WebBrowserArtifactsHelper to
 		 * create web form address artifacts.
 		 */
 		TSK_WEB_FORM_ADDRESS(50, "TSK_WEB_FORM_ADDRESSES ", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWebFormAddresses.text")),
+				bundle.getString("BlackboardArtifact.tskWebFormAddresses.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Indicates source of a file/object
 		 *
@@ -1225,89 +1770,86 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 */
 		@Deprecated
 		TSK_DOWNLOAD_SOURCE(51, "TSK_DOWNLOAD_SOURCE", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskDownloadSource.text")),
+				bundle.getString("BlackboardArtifact.tskDownloadSource.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Indicates web cache data
 		 */
 		TSK_WEB_CACHE(52, "TSK_WEB_CACHE", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskWebCache.text")),
+				bundle.getString("BlackboardArtifact.tskWebCache.text"), Category.DATA_ARTIFACT),
 		/**
 		 * A generic (timeline) event.
 		 */
 		TSK_TL_EVENT(53, "TSK_TL_EVENT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskTLEvent.text")),
+				bundle.getString("BlackboardArtifact.tskTLEvent.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Indicates clipboard content
 		 */
 		TSK_CLIPBOARD_CONTENT(54, "TSK_CLIPBOARD_CONTENT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskClipboardContent.text")),
+				bundle.getString("BlackboardArtifact.tskClipboardContent.text"), Category.DATA_ARTIFACT),
 		/**
 		 * An associated object.
 		 */
 		TSK_ASSOCIATED_OBJECT(55, "TSK_ASSOCIATED_OBJECT", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskAssociatedObject.text")),
+				bundle.getString("BlackboardArtifact.tskAssociatedObject.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Indicates file may have been created by the user.
 		 */
 		TSK_USER_CONTENT_SUSPECTED(56, "TSK_USER_CONTENT_SUSPECTED", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskUserContentSuspected.text")),
+				bundle.getString("BlackboardArtifact.tskUserContentSuspected.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * Stores metadata about an object.
 		 */
 		TSK_METADATA(57, "TSK_METADATA", //NON-NLS
-				bundle.getString("BlackboardArtifact.tskMetadata.text")),
+				bundle.getString("BlackboardArtifact.tskMetadata.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Stores a GPS track log. Use
 		 * org.sleuthkit.datamodel.blackboardutils.GeoArtifactsHelper.addTrack()
 		 * to create track artifacts.
 		 */
 		TSK_GPS_TRACK(58, "TSK_GPS_TRACK",
-				bundle.getString("BlackboardArtifact.tskTrack.text")),
+				bundle.getString("BlackboardArtifact.tskTrack.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Stores a role on a given domain.
 		 */
 		TSK_WEB_ACCOUNT_TYPE(59, "TSK_WEB_ACCOUNT_TYPE",
-				bundle.getString("BlackboardArtifact.tskWebAccountType.text")),
+				bundle.getString("BlackboardArtifact.tskWebAccountType.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * Screen shots from device or Application.
 		 */
 		TSK_SCREEN_SHOTS(60, "TSK_SCREEN_SHOTS",
-				bundle.getString("BlackboardArtifact.tskScreenShots.text")),
+				bundle.getString("BlackboardArtifact.tskScreenShots.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Notifications Sent to User.
 		 */
 		TSK_PROG_NOTIFICATIONS(62, "TSK_PROG_NOTIFICATIONS",
-				bundle.getString("BlackboardArtifact.tskProgNotifications.text")),
+				bundle.getString("BlackboardArtifact.tskProgNotifications.text"), Category.DATA_ARTIFACT),
 		/**
 		 * System/Application/File backup.
 		 */
 		TSK_BACKUP_EVENT(63, "TSK_BACKUP_EVENT",
-				bundle.getString("BlackboardArtifact.tskBackupEvent.text")),
+				bundle.getString("BlackboardArtifact.tskBackupEvent.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Programs that have been deleted.
 		 */
 		TSK_DELETED_PROG(64, "TSK_DELETED_PROG",
-				bundle.getString("BlackboardArtifact.tskDeletedProg.text")),
+				bundle.getString("BlackboardArtifact.tskDeletedProg.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Activity on the System/Application.
 		 */
 		TSK_USER_DEVICE_EVENT(65, "TSK_USER_DEVICE_EVENT",
-				bundle.getString("BlackboardArtifact.tskUserDeviceEvent.text")),
+				bundle.getString("BlackboardArtifact.tskUserDeviceEvent.text"), Category.DATA_ARTIFACT),
 		/**
 		 * Indicates that the file had a yara pattern match hit.
 		 */
 		TSK_YARA_HIT(66, "TSK_YARA_HIT",
-				bundle.getString("BlackboardArtifact.tskYaraHit.text")),
+				bundle.getString("BlackboardArtifact.tskYaraHit.text"), Category.ANALYSIS_RESULT),
 		/**
 		 * Stores the outline of an area using GPS coordinates.
 		 */
 		TSK_GPS_AREA(67, "TSK_GPS_AREA",
-				bundle.getString("BlackboardArtifact.tskGPSArea.text")),
-
+				bundle.getString("BlackboardArtifact.tskGPSArea.text"), Category.DATA_ARTIFACT),
 		TSK_WEB_CATEGORIZATION(68, "TSK_WEB_CATEGORIZATION",
-				bundle.getString("BlackboardArtifact.tskWebCategorization.text")),
-
-		;
+				bundle.getString("BlackboardArtifact.tskWebCategorization.text"), Category.ANALYSIS_RESULT);
 
 		/*
 		 * To developers: For each new artifact, ensure that: - The enum value
@@ -1317,6 +1859,7 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		private final String label;
 		private final int typeId;
 		private final String displayName;
+		private final Category category;
 
 		/**
 		 * Constructs a value for the standard artifact types enum.
@@ -1326,9 +1869,22 @@ public enum ARTIFACT_TYPE implements SleuthkitVisitableItem {
 		 * @param displayName The type display name.
 		 */
 		private ARTIFACT_TYPE(int typeId, String label, String displayName) {
+			this(typeId, label, displayName, Category.DATA_ARTIFACT);
+		}
+
+		/**
+		 * Constructs a value for the standard artifact types enum.
+		 *
+		 * @param typeId      The type id.
+		 * @param label       The type name.
+		 * @param displayName The type display name.
+		 * @param category	   The type category.
+		 */
+		private ARTIFACT_TYPE(int typeId, String label, String displayName, Category category) {
 			this.typeId = typeId;
 			this.label = label;
 			this.displayName = displayName;
+			this.category = category;
 		}
 
 		/**
@@ -1349,6 +1905,15 @@ public String getLabel() {
 			return this.label;
 		}
 
+		/**
+		 * Gets the type category for this standard artifact type.
+		 *
+		 * @return The type category.
+		 */
+		public Category getCategory() {
+			return this.category;
+		}
+
 		/**
 		 * Gets the standard artifact type enum value that corresponds to a
 		 * given type name (label).
@@ -1412,6 +1977,82 @@ public <T> T accept(SleuthkitItemVisitor<T> visitor) {
 
 	}
 
+	/**
+	 * Enumeration to encapsulate the categories of artifact types.
+	 *
+	 * Some artifact types represent data directly extracted from a data source,
+	 * while others may be the result of some analysis done on the extracted
+	 * data.
+	 */
+	public enum Category {
+		// NOTE: The schema code defaults to '0', so that code must be updated too if DATA_ARTIFACT changes from being 0
+		DATA_ARTIFACT(0, "DATA_ARTIFACT", ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("CategoryType.DataArtifact")), // artifact is data that is directly/indirectly extracted from a data source.
+		ANALYSIS_RESULT(1, "ANALYSIS_RESULT", ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("CategoryType.AnalysisResult")); // artifact represents the outcome of analysis performed on extracted data.
+
+		private final Integer id;
+		private final String name;
+		private final String displayName;
+
+		private final static Map<Integer, Category> idToCategory = new HashMap<Integer, Category>();
+
+		static {
+			for (Category category : values()) {
+				idToCategory.put(category.getID(), category);
+			}
+		}
+
+		/**
+		 * Constructs a value for the category enum.
+		 *
+		 * @param id             The category id.
+		 * @param name           The category name.
+		 * @param displayName    The category display name.
+		 */
+		private Category(Integer id, String name, String displayName) {
+			this.id = id;
+			this.name = name;
+			this.displayName = displayName;
+		}
+
+		/**
+		 * Gets the category value with the given id, if one exists.
+		 *
+		 * @param id A category id.
+		 *
+		 * @return The category with the given id, or null if none exists.
+		 */
+		public static Category fromID(int id) {
+			return idToCategory.get(id);
+		}
+
+		/**
+		 * Gets the id of this category.
+		 *
+		 * @return The id of this category.
+		 */
+		public Integer getID() {
+			return id;
+		}
+
+		/**
+		 * Gets the name of this category.
+		 *
+		 * @return The name of this category.
+		 */
+		String getName() {
+			return name;
+		}
+
+		/**
+		 * Gets the display name of this category.
+		 *
+		 * @return The display name of this category.
+		 */
+		public String getDisplayName() {
+			return displayName;
+		}
+	}
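+	// Illustrative sketch only (the conditional shown is assumed caller code, not
+	// part of this change): the category of an artifact type can be inspected to
+	// decide whether it represents extracted data or an analysis outcome, e.g.
+	//   if (Type.TSK_YARA_HIT.getCategory() == Category.ANALYSIS_RESULT) {
+	//       // handle as an analysis result rather than as a data artifact
+	//   }
+	//   Category cat = Category.fromID(1); // ANALYSIS_RESULT, or null if the id is unknown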
+
 	/**
 	 * Enum to represent the review status of an artifact.
 	 */
diff --git a/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java b/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java
index dbe1b340d8d9d7cb3121ea715cd85af85892603d..5696355d4cd0f109aa1d5887208f19d78342418c 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2011-2020 Basis Technology Corp.
+ * Copyright 2011-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -42,28 +42,22 @@
  * attribute by calling the appropriate BlackboardAttribute constructor. It can
  * also be used to do blackboard queries involving the custom type.
  */
-public class BlackboardAttribute {
+public class BlackboardAttribute extends AbstractAttribute {
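+	// Illustrative sketch only (the module name and URL below are made up): the
+	// public constructors keep their signatures after the refactoring onto
+	// AbstractAttribute, so a string-valued attribute is still created as, e.g.
+	//   BlackboardAttribute attr = new BlackboardAttribute(
+	//           Type.TSK_URL, "SampleModule", "https://www.example.com");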
 
-	private static final char[] HEX_ARRAY = "0123456789ABCDEF".toCharArray();
 	private static final Logger LOGGER = Logger.getLogger(BlackboardAttribute.class.getName());
 
 	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
-	private BlackboardAttribute.Type attributeType;
-	private final int valueInt;
-	private final long valueLong;
-	private final double valueDouble;
-	private final String valueString;
-	private final byte[] valueBytes;
+
 	private String context;
-	private long artifactID;
-	private SleuthkitCase sleuthkitCase;
 	private String sources;
-	
+
+	private long artifactID;
+
 	// Cached parent artifact. This field is populated lazily upon the first
 	// call to getParentArtifact().
 	private BlackboardArtifact parentArtifact;
-	
-	// The parent data source is defined as being 
+
+	// The parent data source is defined as being
 	// the data source of the parent artifact.
 	private Long parentDataSourceID;
 
@@ -80,17 +74,8 @@ public class BlackboardAttribute {
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER.
 	 */
 	public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, int valueInt) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER) {
-			throw new IllegalArgumentException("Value types do not match");
-		}
-		this.artifactID = 0;
-		this.attributeType = new BlackboardAttribute.Type(attributeType);
+		super(new BlackboardAttribute.Type(attributeType), valueInt);
 		this.sources = replaceNulls(source);
-		this.valueInt = valueInt;
-		this.valueLong = 0;
-		this.valueDouble = 0;
-		this.valueString = "";
-		this.valueBytes = new byte[0];
 		this.context = "";
 	}
 
@@ -107,17 +92,8 @@ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, int valu
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER.
 	 */
 	public BlackboardAttribute(Type attributeType, String source, int valueInt) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER) {
-			throw new IllegalArgumentException("Type mismatched with value type");
-		}
-		this.artifactID = 0;
-		this.attributeType = attributeType;
+		super(attributeType, valueInt);
 		this.sources = replaceNulls(source);
-		this.valueInt = valueInt;
-		this.valueLong = 0;
-		this.valueDouble = 0;
-		this.valueString = "";
-		this.valueBytes = new byte[0];
 		this.context = "";
 	}
 
@@ -137,18 +113,8 @@ public BlackboardAttribute(Type attributeType, String source, int valueInt) thro
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME.
 	 */
 	public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, long valueLong) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG
-				&& attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME) {
-			throw new IllegalArgumentException("Value types do not match");
-		}
-		this.artifactID = 0;
-		this.attributeType = new BlackboardAttribute.Type(attributeType);
+		super(new BlackboardAttribute.Type(attributeType), valueLong);
 		this.sources = replaceNulls(source);
-		this.valueInt = 0;
-		this.valueLong = valueLong;
-		this.valueDouble = 0;
-		this.valueString = "";
-		this.valueBytes = new byte[0];
 		this.context = "";
 	}
 
@@ -167,18 +133,8 @@ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, long val
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME.
 	 */
 	public BlackboardAttribute(Type attributeType, String source, long valueLong) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG
-				&& attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME) {
-			throw new IllegalArgumentException("Type mismatched with value type");
-		}
-		this.artifactID = 0;
-		this.attributeType = attributeType;
+		super(attributeType, valueLong);
 		this.sources = replaceNulls(source);
-		this.valueInt = 0;
-		this.valueLong = valueLong;
-		this.valueDouble = 0;
-		this.valueString = "";
-		this.valueBytes = new byte[0];
 		this.context = "";
 	}
 
@@ -195,19 +151,9 @@ public BlackboardAttribute(Type attributeType, String source, long valueLong) th
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE.
 	 */
 	public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, double valueDouble) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) {
-			throw new IllegalArgumentException("Value types do not match");
-		}
-		this.artifactID = 0;
-		this.attributeType = new BlackboardAttribute.Type(attributeType);
+		super(new BlackboardAttribute.Type(attributeType), valueDouble);
 		this.sources = replaceNulls(source);
-		this.valueInt = 0;
-		this.valueLong = 0;
-		this.valueDouble = valueDouble;
-		this.valueString = "";
-		this.valueBytes = new byte[0];
 		this.context = "";
-
 	}
 
 	/**
@@ -223,17 +169,8 @@ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, double v
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE.
 	 */
 	public BlackboardAttribute(Type attributeType, String source, double valueDouble) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) {
-			throw new IllegalArgumentException("Type mismatched with value type");
-		}
-		this.artifactID = 0;
-		this.attributeType = attributeType;
+		super(attributeType, valueDouble);
 		this.sources = replaceNulls(source);
-		this.valueInt = 0;
-		this.valueLong = 0;
-		this.valueDouble = valueDouble;
-		this.valueString = "";
-		this.valueBytes = new byte[0];
 		this.context = "";
 	}
 
@@ -252,22 +189,8 @@ public BlackboardAttribute(Type attributeType, String source, double valueDouble
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON
 	 */
 	public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, String valueString) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
-				&& attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) {
-			throw new IllegalArgumentException("Value types do not match");
-		}
-		this.artifactID = 0;
-		this.attributeType = new BlackboardAttribute.Type(attributeType);
+		super(new BlackboardAttribute.Type(attributeType), valueString);
 		this.sources = replaceNulls(source);
-		this.valueInt = 0;
-		this.valueLong = 0;
-		this.valueDouble = 0;
-		if (valueString == null) {
-			this.valueString = "";
-		} else {
-			this.valueString = replaceNulls(valueString).trim();
-		}
-		this.valueBytes = new byte[0];
 		this.context = "";
 	}
 
@@ -284,22 +207,8 @@ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, String v
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING.
 	 */
 	public BlackboardAttribute(Type attributeType, String source, String valueString) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
-				&& attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) {
-			throw new IllegalArgumentException("Type mismatched with value type");
-		}
-		this.artifactID = 0;
-		this.attributeType = attributeType;
+		super(attributeType, valueString);
 		this.sources = replaceNulls(source);
-		this.valueInt = 0;
-		this.valueLong = 0;
-		this.valueDouble = 0;
-		if (valueString == null) {
-			this.valueString = "";
-		} else {
-			this.valueString = replaceNulls(valueString).trim();
-		}
-		this.valueBytes = new byte[0];
 		this.context = "";
 	}
 
@@ -316,22 +225,9 @@ public BlackboardAttribute(Type attributeType, String source, String valueString
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE.
 	 */
 	public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, byte[] valueBytes) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) {
-			throw new IllegalArgumentException("Value types do not match");
-		}
-		this.artifactID = 0;
-		this.attributeType = new BlackboardAttribute.Type(attributeType);
+		super(new BlackboardAttribute.Type(attributeType), valueBytes);
 		this.sources = replaceNulls(source);
 		this.context = "";
-		this.valueInt = 0;
-		this.valueLong = 0;
-		this.valueDouble = 0;
-		this.valueString = "";
-		if (valueBytes == null) {
-			this.valueBytes = new byte[0];
-		} else {
-			this.valueBytes = valueBytes;
-		}
 	}
 
 	/**
@@ -347,22 +243,9 @@ public BlackboardAttribute(ATTRIBUTE_TYPE attributeType, String source, byte[] v
 	 *                                  TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE.
 	 */
 	public BlackboardAttribute(Type attributeType, String source, byte[] valueBytes) throws IllegalArgumentException {
-		if (attributeType.getValueType() != TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) {
-			throw new IllegalArgumentException("Type mismatched with value type");
-		}
-		this.artifactID = 0;
-		this.attributeType = attributeType;
+		super(attributeType, valueBytes);
 		this.sources = replaceNulls(source);
 		this.context = "";
-		this.valueInt = 0;
-		this.valueLong = 0;
-		this.valueDouble = 0;
-		this.valueString = "";
-		if (valueBytes == null) {
-			this.valueBytes = new byte[0];
-		} else {
-			this.valueBytes = valueBytes;
-		}
 	}
 
 	/**
@@ -377,72 +260,12 @@ public long getArtifactID() {
 	}
 
 	/**
-	 * Gets the type of this attribute.
-	 *
-	 * @return The attribute type.
-	 */
-	public BlackboardAttribute.Type getAttributeType() {
-		return this.attributeType;
-	}
-
-	/**
-	 * Gets the value type.
-	 *
-	 * @return The value type
-	 */
-	public TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE getValueType() {
-		return attributeType.getValueType();
-	}
-
-	/**
-	 * Gets the value of this attribute. The value is only valid if the
-	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER.
-	 *
-	 * @return The attribute value.
-	 */
-	public int getValueInt() {
-		return valueInt;
-	}
-
-	/**
-	 * Gets the value of this attribute. The value is only valid if the
-	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG.
-	 *
-	 * @return The attribute value.
-	 */
-	public long getValueLong() {
-		return valueLong;
-	}
-
-	/**
-	 * Gets the value of this attribute. The value is only valid if the
-	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE.
-	 *
-	 * @return The attribute value.
-	 */
-	public double getValueDouble() {
-		return valueDouble;
-	}
-
-	/**
-	 * Gets the value of this attribute. The value is only valid if the
-	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING or
-	 * TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON.
-	 *
-	 * @return The attribute value.
-	 */
-	public String getValueString() {
-		return valueString;
-	}
-
-	/**
-	 * Gets the value of this attribute. The value is only valid if the
-	 * attribute value type is TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE.
+	 * Sets the artifact id.
 	 *
-	 * @return The attribute value.
+	 * @param artifactID The artifact id.
 	 */
-	public byte[] getValueBytes() {
-		return Arrays.copyOf(valueBytes, valueBytes.length);
+	void setArtifactId(long artifactID) {
+		this.artifactID = artifactID;
 	}
 
 	/**
@@ -467,7 +290,7 @@ public List<String> getSources() {
 	 * @throws org.sleuthkit.datamodel.TskCoreException
 	 */
 	public void addSource(String source) throws TskCoreException {
-		this.sources = sleuthkitCase.addSourceToArtifactAttribute(this, source);
+		this.sources = getCaseDatabase().addSourceToArtifactAttribute(this, source);
 	}
 
 	/**
@@ -483,33 +306,35 @@ public void addSource(String source) throws TskCoreException {
 	 */
 	public BlackboardArtifact getParentArtifact() throws TskCoreException {
 		if (parentArtifact == null) {
-			parentArtifact = sleuthkitCase.getBlackboardArtifact(artifactID);
+			parentArtifact = getCaseDatabase().getBlackboardArtifact(getArtifactID());
 		}
 		return parentArtifact;
 	}
 
 	@Override
 	public int hashCode() {
-		int hash = 5;
-		hash = 97 * hash + (int) (this.artifactID ^ (this.artifactID >>> 32));
-		return hash;
+		return Objects.hash(
+				this.getAttributeType(), this.getValueInt(), this.getValueLong(), this.getValueDouble(),
+				this.getValueString(), this.getValueBytes(), this.getSources(), getContext());
 	}
 
 	@Override
-	public boolean equals(Object obj) {
-		if (obj == null) {
-			return false;
-		}
-		if (getClass() != obj.getClass()) {
+	public boolean equals(Object that) {
+		if (this == that) {
+			return true;
+		} else if (that instanceof BlackboardAttribute) {
+			BlackboardAttribute other = (BlackboardAttribute) that;
+			Object[] thisObject = new Object[]{this.getSources(), this.getContext()};
+			Object[] otherObject = new Object[]{other.getSources(), other.getContext()};
+			return areValuesEqual(that) && Objects.deepEquals(thisObject, otherObject);
+		} else {
 			return false;
 		}
-		final BlackboardAttribute other = (BlackboardAttribute) obj;
-		return this.artifactID == other.getArtifactID();
 	}
 
 	@Override
 	public String toString() {
-		return "BlackboardAttribute{" + "artifactID=" + artifactID + ", attributeType=" + attributeType.toString() + ", moduleName=" + sources + ", context=" + context + ", valueInt=" + valueInt + ", valueLong=" + valueLong + ", valueDouble=" + valueDouble + ", valueString=" + valueString + ", valueBytes=" + Arrays.toString(valueBytes) + ", Case=" + sleuthkitCase + '}'; //NON-NLS
+		return "BlackboardAttribute{" + "artifactID=" + getArtifactID() + ", attributeType=" + getAttributeType().toString() + ", moduleName=" + getSources() + ", context=" + context + ", valueInt=" + getValueInt() + ", valueLong=" + getValueLong() + ", valueDouble=" + getValueDouble() + ", valueString=" + getValueString() + ", valueBytes=" + Arrays.toString(getValueBytes()) + ", Case=" + getCaseDatabase() + '}'; //NON-NLS
 	}
 
 	/**
@@ -517,36 +342,16 @@ public String toString() {
 	 *
 	 * @return The value as a string.
 	 */
+	@Override
 	public String getDisplayString() {
-		switch (attributeType.getValueType()) {
-			case STRING:
-				return getValueString();
-			case INTEGER:
-				if (attributeType.getTypeID() == ATTRIBUTE_TYPE.TSK_READ_STATUS.getTypeID()) {
-					if (getValueInt() == 0) {
-						return "Unread";
-					} else {
-						return "Read";
-					}
-				}
-				return Integer.toString(getValueInt());
-			case LONG:
-				// SHOULD at some point figure out how to convert times in here 
-				// based on preferred formats and such.  Perhaps provide another 
-				// method that takes a formatter argument. 
-				return Long.toString(getValueLong());
-			case DOUBLE:
-				return Double.toString(getValueDouble());
-			case BYTE:
-				return bytesToHexString(getValueBytes());
-
+		switch (getAttributeType().getValueType()) {
 			case DATETIME: {
 				try {
 					if (parentDataSourceID == null) {
 						BlackboardArtifact parent = getParentArtifact();
 						parentDataSourceID = parent.getDataSourceObjectID();
 					}
-					final Content dataSource = sleuthkitCase.getContentById(parentDataSourceID);
+					final Content dataSource = parentDataSourceID != null ? getCaseDatabase().getContentById(parentDataSourceID) : null;
 					if ((dataSource != null) && (dataSource instanceof Image)) {
 						// return the date/time string in the timezone associated with the datasource,
 						Image image = (Image) dataSource;
@@ -559,11 +364,10 @@ public String getDisplayString() {
 				// return time string in default timezone
 				return TimeUtilities.epochToTime(getValueLong());
 			}
-			case JSON: {
-				return getValueString();
+			default: {
+				return super.getDisplayString();
 			}
 		}
-		return "";
 	}
 
 	/**
@@ -588,52 +392,19 @@ public String getDisplayString() {
 			int valueInt, long valueLong, double valueDouble, String valueString, byte[] valueBytes,
 			SleuthkitCase sleuthkitCase) {
 
+		super(attributeType, valueInt, valueLong, valueDouble, valueString, valueBytes, sleuthkitCase);
 		this.artifactID = artifactID;
-		this.attributeType = attributeType;
 		this.sources = replaceNulls(source);
 		this.context = replaceNulls(context);
-		this.valueInt = valueInt;
-		this.valueLong = valueLong;
-		this.valueDouble = valueDouble;
-		if (valueString == null) {
-			this.valueString = "";
-		} else {
-			this.valueString = replaceNulls(valueString).trim();
-		}
-		if (valueBytes == null) {
-			this.valueBytes = new byte[0];
-		} else {
-			this.valueBytes = valueBytes;
-		}
-		this.sleuthkitCase = sleuthkitCase;
-	}
-
-	/**
-	 * Sets the reference to the SleuthkitCase object that represents the case
-	 * database.
-	 *
-	 * @param sleuthkitCase A reference to a SleuthkitCase object.
-	 */
-	void setCaseDatabase(SleuthkitCase sleuthkitCase) {
-		this.sleuthkitCase = sleuthkitCase;
 	}
 
 	/**
-	 * Sets the artifact id.
+	 * Sets the parent data source id. The parent data source is defined as
+	 * being the data source of the parent artifact.
 	 *
-	 * @param artifactID The artifact id.
-	 */
-	void setArtifactId(long artifactID) {
-		this.artifactID = artifactID;
-	}
-	
-	/**
-	 * Sets the parent data source id. The parent data source is defined
-	 * as being the data source of the parent artifact.
-	 * 
 	 * @param parentDataSourceID The parent data source id.
 	 */
-	void setParentDataSourceID(long parentDataSourceID) {
+	void setParentDataSourceID(Long parentDataSourceID) {
 		this.parentDataSourceID = parentDataSourceID;
 	}
 
@@ -649,40 +420,350 @@ String getSourcesCSV() {
 		return sources;
 	}
 
-	/**
-	 * Converts a byte array to a string.
-	 *
-	 * @param bytes The byte array.
-	 *
-	 * @return The string.
-	 */
-	static String bytesToHexString(byte[] bytes) {
-		// from http://stackoverflow.com/questions/9655181/convert-from-byte-array-to-hex-string-in-java
-		char[] hexChars = new char[bytes.length * 2];
-		for (int j = 0; j < bytes.length; j++) {
-			int v = bytes[j] & 0xFF;
-			hexChars[j * 2] = HEX_ARRAY[v >>> 4];
-			hexChars[j * 2 + 1] = HEX_ARRAY[v & 0x0F];
-		}
-		return new String(hexChars);
-	}
-
-	/**
-	 * Replace all NUL characters in the string with the SUB character
-	 *
-	 * @param text The input string.
-	 *
-	 * @return The output string.
-	 */
-	private String replaceNulls(String text) {
-		return text.replace((char) 0x00, (char) 0x1A);
-	}
-
 	/**
 	 * Represents the type of an attribute.
 	 */
 	public static final class Type implements Serializable {
 
+		public static final Type TSK_URL = new Type(1, "TSK_URL", bundle.getString("BlackboardAttribute.tskUrl.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DATETIME = new Type(2, "TSK_DATETIME", bundle.getString("BlackboardAttribute.tskDatetime.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_NAME = new Type(3, "TSK_NAME", bundle.getString("BlackboardAttribute.tskName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PROG_NAME = new Type(4, "TSK_PROG_NAME", bundle.getString("BlackboardAttribute.tskProgName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_VALUE = new Type(6, "TSK_VALUE", bundle.getString("BlackboardAttribute.tskValue.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_FLAG = new Type(7, "TSK_FLAG", bundle.getString("BlackboardAttribute.tskFlag.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PATH = new Type(8, "TSK_PATH", bundle.getString("BlackboardAttribute.tskPath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_KEYWORD = new Type(10, "TSK_KEYWORD", bundle.getString("BlackboardAttribute.tskKeyword.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_KEYWORD_REGEXP = new Type(11, "TSK_KEYWORD_REGEXP", bundle.getString("BlackboardAttribute.tskKeywordRegexp.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_KEYWORD_PREVIEW = new Type(12, "TSK_KEYWORD_PREVIEW", bundle.getString("BlackboardAttribute.tskKeywordPreview.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+
+		// TSK_KEYWORD_SET (id: 13) has been deprecated.  Please use TSK_SET_NAME instead.
+		public static final Type TSK_USER_NAME = new Type(14, "TSK_USER_NAME", bundle.getString("BlackboardAttribute.tskUserName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DOMAIN = new Type(15, "TSK_DOMAIN", bundle.getString("BlackboardAttribute.tskDomain.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PASSWORD = new Type(16, "TSK_PASSWORD", bundle.getString("BlackboardAttribute.tskPassword.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_NAME_PERSON = new Type(17, "TSK_NAME_PERSON", bundle.getString("BlackboardAttribute.tskNamePerson.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DEVICE_MODEL = new Type(18, "TSK_DEVICE_MODEL", bundle.getString("BlackboardAttribute.tskDeviceModel.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DEVICE_MAKE = new Type(19, "TSK_DEVICE_MAKE", bundle.getString("BlackboardAttribute.tskDeviceMake.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DEVICE_ID = new Type(20, "TSK_DEVICE_ID", bundle.getString("BlackboardAttribute.tskDeviceId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_EMAIL = new Type(21, "TSK_EMAIL", bundle.getString("BlackboardAttribute.tskEmail.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_HASH_MD5 = new Type(22, "TSK_HASH_MD5", bundle.getString("BlackboardAttribute.tskHashMd5.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_HASH_SHA1 = new Type(23, "TSK_HASH_SHA1", bundle.getString("BlackboardAttribute.tskHashSha1.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_HASH_SHA2_256 = new Type(24, "TSK_HASH_SHA2_256", bundle.getString("BlackboardAttribute.tskHashSha225.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_HASH_SHA2_512 = new Type(25, "TSK_HASH_SHA2_512", bundle.getString("BlackboardAttribute.tskHashSha2512.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_TEXT = new Type(26, "TSK_TEXT", bundle.getString("BlackboardAttribute.tskText.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_TEXT_FILE = new Type(27, "TSK_TEXT_FILE", bundle.getString("BlackboardAttribute.tskTextFile.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_TEXT_LANGUAGE = new Type(28, "TSK_TEXT_LANGUAGE", bundle.getString("BlackboardAttribute.tskTextLanguage.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_ENTROPY = new Type(29, "TSK_ENTROPY", bundle.getString("BlackboardAttribute.tskEntropy.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+
+		// TSK_HASHSET_NAME (id: 30) has been deprecated.  Please use TSK_SET_NAME instead.
+		// TSK_INTERESTING_FILE (id: 31) has been deprecated.  Please use TSK_INTERESTING_FILE_HIT instead.
+		public static final Type TSK_REFERRER = new Type(32, "TSK_REFERRER", bundle.getString("BlackboardAttribute.tskReferrer.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DATETIME_ACCESSED = new Type(33, "TSK_DATETIME_ACCESSED", bundle.getString("BlackboardAttribute.tskDateTimeAccessed.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_IP_ADDRESS = new Type(34, "TSK_IP_ADDRESS", bundle.getString("BlackboardAttribute.tskIpAddress.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PHONE_NUMBER = new Type(35, "TSK_PHONE_NUMBER", bundle.getString("BlackboardAttribute.tskPhoneNumber.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PATH_ID = new Type(36, "TSK_PATH_ID", bundle.getString("BlackboardAttribute.tskPathId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG);
+		public static final Type TSK_SET_NAME = new Type(37, "TSK_SET_NAME", bundle.getString("BlackboardAttribute.tskSetName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+
+		// TSK_ENCRYPTION_DETECTED (id: 38) has been deprecated.  Please use TSK_ENCRYPTION_DETECTED as an artifact.
+		public static final Type TSK_MALWARE_DETECTED = new Type(39, "TSK_MALWARE_DETECTED", bundle.getString("BlackboardAttribute.tskMalwareDetected.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER);
+		public static final Type TSK_STEG_DETECTED = new Type(40, "TSK_STEG_DETECTED", bundle.getString("BlackboardAttribute.tskStegDetected.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER);
+		public static final Type TSK_EMAIL_TO = new Type(41, "TSK_EMAIL_TO", bundle.getString("BlackboardAttribute.tskEmailTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_EMAIL_CC = new Type(42, "TSK_EMAIL_CC", bundle.getString("BlackboardAttribute.tskEmailCc.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_EMAIL_BCC = new Type(43, "TSK_EMAIL_BCC", bundle.getString("BlackboardAttribute.tskEmailBcc.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_EMAIL_FROM = new Type(44, "TSK_EMAIL_FROM", bundle.getString("BlackboardAttribute.tskEmailFrom.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_EMAIL_CONTENT_PLAIN = new Type(45, "TSK_EMAIL_CONTENT_PLAIN", bundle.getString("BlackboardAttribute.tskEmailContentPlain.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_EMAIL_CONTENT_HTML = new Type(46, "TSK_EMAIL_CONTENT_HTML", bundle.getString("BlackboardAttribute.tskEmailContentHtml.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_EMAIL_CONTENT_RTF = new Type(47, "TSK_EMAIL_CONTENT_RTF", bundle.getString("BlackboardAttribute.tskEmailContentRtf.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_MSG_ID = new Type(48, "TSK_MSG_ID", bundle.getString("BlackboardAttribute.tskMsgId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_MSG_REPLY_ID = new Type(49, "TSK_MSG_REPLY_ID", bundle.getString("BlackboardAttribute.tskMsgReplyId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DATETIME_RCVD = new Type(50, "TSK_DATETIME_RCVD", bundle.getString("BlackboardAttribute.tskDateTimeRcvd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_DATETIME_SENT = new Type(51, "TSK_DATETIME_SENT", bundle.getString("BlackboardAttribute.tskDateTimeSent.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_SUBJECT = new Type(52, "TSK_SUBJECT", bundle.getString("BlackboardAttribute.tskSubject.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_TITLE = new Type(53, "TSK_TITLE", bundle.getString("BlackboardAttribute.tskTitle.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_GEO_LATITUDE = new Type(54, "TSK_GEO_LATITUDE", bundle.getString("BlackboardAttribute.tskGeoLatitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+		public static final Type TSK_GEO_LONGITUDE = new Type(55, "TSK_GEO_LONGITUDE", bundle.getString("BlackboardAttribute.tskGeoLongitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+		public static final Type TSK_GEO_VELOCITY = new Type(56, "TSK_GEO_VELOCITY", bundle.getString("BlackboardAttribute.tskGeoVelocity.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+		public static final Type TSK_GEO_ALTITUDE = new Type(57, "TSK_GEO_ALTITUDE", bundle.getString("BlackboardAttribute.tskGeoAltitude.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+		public static final Type TSK_GEO_BEARING = new Type(58, "TSK_GEO_BEARING", bundle.getString("BlackboardAttribute.tskGeoBearing.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_GEO_HPRECISION = new Type(59, "TSK_GEO_HPRECISION", bundle.getString("BlackboardAttribute.tskGeoHPrecision.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+		public static final Type TSK_GEO_VPRECISION = new Type(60, "TSK_GEO_VPRECISION", bundle.getString("BlackboardAttribute.tskGeoVPrecision.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+		public static final Type TSK_GEO_MAPDATUM = new Type(61, "TSK_GEO_MAPDATUM", bundle.getString("BlackboardAttribute.tskGeoMapDatum.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+
+		// TSK_FILE_TYPE_SIG (id: 62) has been deprecated.  Please use the mime type field of the AbstractFile object instead.
+		public static final Type TSK_FILE_TYPE_EXT = new Type(63, "TSK_FILE_TYPE_EXT", bundle.getString("BlackboardAttribute.tskFileTypeExt.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+
+		// TSK_TAGGED_ARTIFACT (id: 64) has been deprecated.  Please create a tag as an artifact.
+		// TSK_TAG_NAME (id: 65) has been deprecated.  Please create a tag as an artifact.
+		public static final Type TSK_COMMENT = new Type(66, "TSK_COMMENT", bundle.getString("BlackboardAttribute.tskComment.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_URL_DECODED = new Type(67, "TSK_URL_DECODED", bundle.getString("BlackboardAttribute.tskUrlDecoded.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DATETIME_CREATED = new Type(68, "TSK_DATETIME_CREATED", bundle.getString("BlackboardAttribute.tskDateTimeCreated.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_DATETIME_MODIFIED = new Type(69, "TSK_DATETIME_MODIFIED", bundle.getString("BlackboardAttribute.tskDateTimeModified.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_PROCESSOR_ARCHITECTURE = new Type(70, "TSK_PROCESSOR_ARCHITECTURE", bundle.getString("BlackboardAttribute.tskProcessorArchitecture.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_VERSION = new Type(71, "TSK_VERSION", bundle.getString("BlackboardAttribute.tskVersion.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_USER_ID = new Type(72, "TSK_USER_ID", bundle.getString("BlackboardAttribute.tskUserId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DESCRIPTION = new Type(73, "TSK_DESCRIPTION", bundle.getString("BlackboardAttribute.tskDescription.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_MESSAGE_TYPE = new Type(74, "TSK_MESSAGE_TYPE", bundle.getString("BlackboardAttribute.tskMessageType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // SMS or MMS or IM ...
+		public static final Type TSK_PHONE_NUMBER_HOME = new Type(75, "TSK_PHONE_NUMBER_HOME", bundle.getString("BlackboardAttribute.tskPhoneNumberHome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PHONE_NUMBER_OFFICE = new Type(76, "TSK_PHONE_NUMBER_OFFICE", bundle.getString("BlackboardAttribute.tskPhoneNumberOffice.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PHONE_NUMBER_MOBILE = new Type(77, "TSK_PHONE_NUMBER_MOBILE", bundle.getString("BlackboardAttribute.tskPhoneNumberMobile.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PHONE_NUMBER_FROM = new Type(78, "TSK_PHONE_NUMBER_FROM", bundle.getString("BlackboardAttribute.tskPhoneNumberFrom.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PHONE_NUMBER_TO = new Type(79, "TSK_PHONE_NUMBER_TO", bundle.getString("BlackboardAttribute.tskPhoneNumberTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DIRECTION = new Type(80, "TSK_DIRECTION", bundle.getString("BlackboardAttribute.tskDirection.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Msg/Call direction: incoming, outgoing
+		public static final Type TSK_EMAIL_HOME = new Type(81, "TSK_EMAIL_HOME", bundle.getString("BlackboardAttribute.tskEmailHome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_EMAIL_OFFICE = new Type(82, "TSK_EMAIL_OFFICE", bundle.getString("BlackboardAttribute.tskEmailOffice.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_DATETIME_START = new Type(83, "TSK_DATETIME_START", bundle.getString("BlackboardAttribute.tskDateTimeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); // start time of an event - call log, Calendar entry
+		public static final Type TSK_DATETIME_END = new Type(84, "TSK_DATETIME_END", bundle.getString("BlackboardAttribute.tskDateTimeEnd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME); // end time of an event - call log, Calendar entry
+		public static final Type TSK_CALENDAR_ENTRY_TYPE = new Type(85, "TSK_CALENDAR_ENTRY_TYPE", bundle.getString("BlackboardAttribute.tskCalendarEntryType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // meeting, task,
+		public static final Type TSK_LOCATION = new Type(86, "TSK_LOCATION", bundle.getString("BlackboardAttribute.tskLocation.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Location string associated with an event - Conf Room Name, Address ....
+		public static final Type TSK_SHORTCUT = new Type(87, "TSK_SHORTCUT", bundle.getString("BlackboardAttribute.tskShortcut.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Short Cut string - short code or dial string for Speed dial, a URL short cut - e.g. bitly string, Windows Desktop Short cut name etc.
+		public static final Type TSK_DEVICE_NAME = new Type(88, "TSK_DEVICE_NAME", bundle.getString("BlackboardAttribute.tskDeviceName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // device name - a user assigned (usually) device name - such as "Joe's computer", "bob_win8", "BT Headset"
+		public static final Type TSK_CATEGORY = new Type(89, "TSK_CATEGORY", bundle.getString("BlackboardAttribute.tskCategory.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // category/type, possible value set varies by the artifact
+		public static final Type TSK_EMAIL_REPLYTO = new Type(90, "TSK_EMAIL_REPLYTO", bundle.getString("BlackboardAttribute.tskEmailReplyTo.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // ReplyTo address
+		public static final Type TSK_SERVER_NAME = new Type(91, "TSK_SERVER_NAME", bundle.getString("BlackboardAttribute.tskServerName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // server name, e.g. a mail server name - "smtp.google.com", a DNS server name...
+		public static final Type TSK_COUNT = new Type(92, "TSK_COUNT", bundle.getString("BlackboardAttribute.tskCount.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); // Count related to the artifact
+		public static final Type TSK_MIN_COUNT = new Type(93, "TSK_MIN_COUNT", bundle.getString("BlackboardAttribute.tskMinCount.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); // Minimum number/count
+		public static final Type TSK_PATH_SOURCE = new Type(94, "TSK_PATH_SOURCE", bundle.getString("BlackboardAttribute.tskPathSource.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Path to a source file related to the artifact
+		public static final Type TSK_PERMISSIONS = new Type(95, "TSK_PERMISSIONS", bundle.getString("BlackboardAttribute.tskPermissions.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Permissions
+		public static final Type TSK_ASSOCIATED_ARTIFACT = new Type(96, "TSK_ASSOCIATED_ARTIFACT", bundle.getString("BlackboardAttribute.tskAssociatedArtifact.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG); // Artifact ID of a related artifact
+		public static final Type TSK_ISDELETED = new Type(97, "TSK_ISDELETED", bundle.getString("BlackboardAttribute.tskIsDeleted.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // boolean to indicate that the artifact is recovered from deleted content
+		public static final Type TSK_GEO_LATITUDE_START = new Type(98, "TSK_GEO_LATITUDE_START", bundle.getString("BlackboardAttribute.tskGeoLatitudeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); // Starting location latitude
+		public static final Type TSK_GEO_LATITUDE_END = new Type(99, "TSK_GEO_LATITUDE_END", bundle.getString("BlackboardAttribute.tskGeoLatitudeEnd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); // Ending location latitude
+		public static final Type TSK_GEO_LONGITUDE_START = new Type(100, "TSK_GEO_LONGITUDE_START", bundle.getString("BlackboardAttribute.tskGeoLongitudeStart.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); // Starting location longitude
+		public static final Type TSK_GEO_LONGITUDE_END = new Type(101, "TSK_GEO_LONGITUDE_END", bundle.getString("BlackboardAttribute.tskGeoLongitudeEnd.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE); // Ending location longitude
+		public static final Type TSK_READ_STATUS = new Type(102, "TSK_READ_STATUS", bundle.getString("BlackboardAttribute.tskReadStatus.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER); // Message read status: 1 if read, 0 if unread
+		public static final Type TSK_LOCAL_PATH = new Type(103, "TSK_LOCAL_PATH", bundle.getString("BlackboardAttribute.tskLocalPath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Local path to a network drive
+		public static final Type TSK_REMOTE_PATH = new Type(104, "TSK_REMOTE_PATH", bundle.getString("BlackboardAttribute.tskRemotePath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Remote path of a network drive
+		public static final Type TSK_TEMP_DIR = new Type(105, "TSK_TEMP_DIR", bundle.getString("BlackboardAttribute.tskTempDir.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Default temporary files directory
+		public static final Type TSK_PRODUCT_ID = new Type(106, "TSK_PRODUCT_ID", bundle.getString("BlackboardAttribute.tskProductId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Product ID
+		public static final Type TSK_OWNER = new Type(107, "TSK_OWNER", bundle.getString("BlackboardAttribute.tskOwner.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Registered owner of a piece of software
+		public static final Type TSK_ORGANIZATION = new Type(108, "TSK_ORGANIZATION", bundle.getString("BlackboardAttribute.tskOrganization.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING); // Registered Organization for a piece of software
+		public static final Type TSK_CARD_NUMBER = new Type(109, "TSK_CARD_NUMBER", bundle.getString("BlackboardAttribute.tskCardNumber.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_CARD_EXPIRATION = new Type(110, "TSK_CARD_EXPIRATION", bundle.getString("BlackboardAttribute.tskCardExpiration.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_CARD_SERVICE_CODE = new Type(111, "TSK_CARD_SERVICE_CODE", bundle.getString("BlackboardAttribute.tskCardServiceCode.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_CARD_DISCRETIONARY = new Type(112, "TSK_CARD_DISCRETIONARY", bundle.getString("BlackboardAttribute.tskCardDiscretionary.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_CARD_LRC = new Type(113, "TSK_CARD_LRC", bundle.getString("BlackboardAttribute.tskCardLRC.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_KEYWORD_SEARCH_DOCUMENT_ID = new Type(114, "TSK_KEYWORD_SEARCH_DOCUMENT_ID", bundle.getString("BlackboardAttribute.tskKeywordSearchDocumentID.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_CARD_SCHEME = new Type(115, "TSK_CARD_SCHEME", bundle.getString("BlackboardAttribute.tskCardScheme.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_CARD_TYPE = new Type(116, "TSK_CARD_TYPE", bundle.getString("BlackboardAttribute.tskCardType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_BRAND_NAME = new Type(117, "TSK_BRAND_NAME", bundle.getString("BlackboardAttribute.tskBrandName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_BANK_NAME = new Type(118, "TSK_BANK_NAME", bundle.getString("BlackboardAttribute.tskBankName.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_COUNTRY = new Type(119, "TSK_COUNTRY", bundle.getString("BlackboardAttribute.tskCountry.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_CITY = new Type(120, "TSK_CITY", bundle.getString("BlackboardAttribute.tskCity.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_ACCOUNT_TYPE = new Type(121, "TSK_ACCOUNT_TYPE", bundle.getString("BlackboardAttribute.tskAccountType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+
+		/**
+		 * Keyword search type: exact match, sub-string, or regex.
+		 */
+		public static final Type TSK_KEYWORD_SEARCH_TYPE = new Type(122, "TSK_KEYWORD_SEARCH_TYPE", bundle.getString("BlackboardAttribute.tskKeywordSearchType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER);
+		public static final Type TSK_HEADERS = new Type(123, "TSK_HEADERS", bundle.getString("BlackboardAttribute.tskHeaders.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_ID = new Type(124, "TSK_ID", bundle.getString("BlackboardAttribute.tskId.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_SSID = new Type(125, "TSK_SSID", bundle.getString("BlackboardAttribute.tskSsid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_BSSID = new Type(126, "TSK_BSSID", bundle.getString("BlackboardAttribute.tskBssid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_MAC_ADDRESS = new Type(127, "TSK_MAC_ADDRESS", bundle.getString("BlackboardAttribute.tskMacAddress.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_IMEI = new Type(128, "TSK_IMEI", bundle.getString("BlackboardAttribute.tskImei.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_IMSI = new Type(129, "TSK_IMSI", bundle.getString("BlackboardAttribute.tskImsi.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_ICCID = new Type(130, "TSK_ICCID", bundle.getString("BlackboardAttribute.tskIccid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_THREAD_ID = new Type(131, "TSK_THREAD_ID", bundle.getString("BlackboardAttribute.tskthreadid.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		/**
+		 * The event type of a TSK_TL_EVENT artifact. The value should be the id
+		 * of the EventType in the tsk_event_types table.
+		 */
+		public static final Type TSK_TL_EVENT_TYPE = new Type(132, "TSK_TL_EVENT_TYPE", bundle.getString("BlackboardAttribute.tskTLEventType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG);
+		public static final Type TSK_DATETIME_DELETED = new Type(133, "TSK_DATETIME_DELETED", bundle.getString("BlackboardAttribute.tskdatetimedeleted.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_DATETIME_PASSWORD_RESET = new Type(134, "TSK_DATETIME_PASSWORD_RESET", bundle.getString("BlackboardAttribute.tskdatetimepwdreset.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_DATETIME_PASSWORD_FAIL = new Type(135, "TSK_DATETIME_PWD_FAIL", bundle.getString("BlackboardAttribute.tskdatetimepwdfail.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_DISPLAY_NAME = new Type(136, "TSK_DISPLAY_NAME", bundle.getString("BlackboardAttribute.tskdisplayname.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PASSWORD_SETTINGS = new Type(137, "TSK_PASSWORD_SETTINGS", bundle.getString("BlackboardAttribute.tskpasswordsettings.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_ACCOUNT_SETTINGS = new Type(138, "TSK_ACCOUNT_SETTINGS", bundle.getString("BlackboardAttribute.tskaccountsettings.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_PASSWORD_HINT = new Type(139, "TSK_PASSWORD_HINT", bundle.getString("BlackboardAttribute.tskpasswordhint.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_GROUPS = new Type(140, "TSK_GROUPS", bundle.getString("BlackboardAttribute.tskgroups.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		/*
+		 * Use
+		 * org.sleuthkit.datamodel.blackboardutils.attributes.MessageAttachments
+		 * to create and process TSK_ATTACHMENTS attributes.
+		 */
+		public static final Type TSK_ATTACHMENTS = new Type(141, "TSK_ATTACHMENTS", bundle.getString("BlackboardAttribute.tskattachments.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON);
+		/*
+		 * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoTrackPoints
+		 * to create and process TSK_GEO_TRACKPOINTS attributes.
+		 */
+		public static final Type TSK_GEO_TRACKPOINTS = new Type(142, "TSK_GEO_TRACKPOINTS", bundle.getString("BlackboardAttribute.tskgeopath.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON);
+		/*
+		 * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoWaypoints
+		 * to create and process TSK_GEO_WAYPOINTS attributes.
+		 */
+		public static final Type TSK_GEO_WAYPOINTS = new Type(143, "TSK_GEO_WAYPOINTS", bundle.getString("BlackboardAttribute.tskgeowaypoints.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON);
+		public static final Type TSK_DISTANCE_TRAVELED = new Type(144, "TSK_DISTANCE_TRAVELED", bundle.getString("BlackboardAttribute.tskdistancetraveled.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+		public static final Type TSK_DISTANCE_FROM_HOMEPOINT = new Type(145, "TSK_DISTANCE_FROM_HOMEPOINT", bundle.getString("BlackboardAttribute.tskdistancefromhome.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE);
+		public static final Type TSK_HASH_PHOTODNA = new Type(146, "TSK_HASH_PHOTODNA", bundle.getString("BlackboardAttribute.tskhashphotodna.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_BYTES_SENT = new Type(147, "TSK_BYTES_SENT", bundle.getString("BlackboardAttribute.tskbytessent.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG);
+		public static final Type TSK_BYTES_RECEIVED = new Type(148, "TSK_BYTES_RECEIVED", bundle.getString("BlackboardAttribute.tskbytesreceived.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG);
+		public static final Type TSK_LAST_PRINTED_DATETIME = new Type(149, "TSK_LAST_PRINTED_DATETIME", bundle.getString("BlackboardAttribute.tsklastprinteddatetime.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+		public static final Type TSK_RULE = new Type(150, "TSK_RULE", bundle.getString("BlackboardAttribute.tskrule.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_ACTIVITY_TYPE = new Type(151, "TSK_ACTIVITY_TYPE", bundle.getString("BlackboardAttribute.tskActivityType.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		/*
+		 * Use org.sleuthkit.datamodel.blackboardutils.attributes.GeoAreaPoints
+		 * to create and process TSK_GEO_AREAPOINTS attributes.
+		 */
+		public static final Type TSK_GEO_AREAPOINTS = new Type(152, "TSK_GEO_AREAPOINTS", bundle.getString("BlackboardAttribute.tskgeoareapoints.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON);
+		public static final Type TSK_REALM = new Type(153, "TSK_REALM", bundle.getString("BlackboardAttribute.tskRealm.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_HOST = new Type(154, "TSK_HOST", bundle.getString("BlackboardAttribute.tskHost.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_HOME_DIR = new Type(155, "TSK_HOME_DIR", bundle.getString("BlackboardAttribute.tskHomeDir.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
+		public static final Type TSK_IS_ADMIN = new Type(156, "TSK_IS_ADMIN", bundle.getString("BlackboardAttribute.tskIsAdmin.text"), TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER);
+		// NOTE: When adding a new standard BlackboardAttribute.Type, add the instance and then add to the STANDARD_TYPES list.
+		/**
+		 * A list of all the standard attribute types.
+		 */
+		
+		static final List<Type> STANDARD_TYPES = Collections.unmodifiableList(Arrays.asList(
+				TSK_URL,
+				TSK_DATETIME,
+				TSK_NAME,
+				TSK_PROG_NAME,
+				TSK_VALUE,
+				TSK_FLAG,
+				TSK_PATH,
+				TSK_KEYWORD,
+				TSK_KEYWORD_REGEXP,
+				TSK_KEYWORD_PREVIEW,
+				TSK_USER_NAME,
+				TSK_DOMAIN,
+				TSK_PASSWORD,
+				TSK_NAME_PERSON,
+				TSK_DEVICE_MODEL,
+				TSK_DEVICE_MAKE,
+				TSK_DEVICE_ID,
+				TSK_EMAIL,
+				TSK_HASH_MD5,
+				TSK_HASH_SHA1,
+				TSK_HASH_SHA2_256,
+				TSK_HASH_SHA2_512,
+				TSK_TEXT,
+				TSK_TEXT_FILE,
+				TSK_TEXT_LANGUAGE,
+				TSK_ENTROPY,
+				TSK_REFERRER,
+				TSK_DATETIME_ACCESSED,
+				TSK_IP_ADDRESS,
+				TSK_PHONE_NUMBER,
+				TSK_PATH_ID,
+				TSK_SET_NAME,
+				TSK_MALWARE_DETECTED,
+				TSK_STEG_DETECTED,
+				TSK_EMAIL_TO,
+				TSK_EMAIL_CC,
+				TSK_EMAIL_BCC,
+				TSK_EMAIL_FROM,
+				TSK_EMAIL_CONTENT_PLAIN,
+				TSK_EMAIL_CONTENT_HTML,
+				TSK_EMAIL_CONTENT_RTF,
+				TSK_MSG_ID,
+				TSK_MSG_REPLY_ID,
+				TSK_DATETIME_RCVD,
+				TSK_DATETIME_SENT,
+				TSK_SUBJECT,
+				TSK_TITLE,
+				TSK_GEO_LATITUDE,
+				TSK_GEO_LONGITUDE,
+				TSK_GEO_VELOCITY,
+				TSK_GEO_ALTITUDE,
+				TSK_GEO_BEARING,
+				TSK_GEO_HPRECISION,
+				TSK_GEO_VPRECISION,
+				TSK_GEO_MAPDATUM,
+				TSK_FILE_TYPE_EXT,
+				TSK_COMMENT,
+				TSK_URL_DECODED,
+				TSK_DATETIME_CREATED,
+				TSK_DATETIME_MODIFIED,
+				TSK_PROCESSOR_ARCHITECTURE,
+				TSK_VERSION,
+				TSK_USER_ID,
+				TSK_DESCRIPTION,
+				TSK_MESSAGE_TYPE,
+				TSK_PHONE_NUMBER_HOME,
+				TSK_PHONE_NUMBER_OFFICE,
+				TSK_PHONE_NUMBER_MOBILE,
+				TSK_PHONE_NUMBER_FROM,
+				TSK_PHONE_NUMBER_TO,
+				TSK_DIRECTION,
+				TSK_EMAIL_HOME,
+				TSK_EMAIL_OFFICE,
+				TSK_DATETIME_START,
+				TSK_DATETIME_END,
+				TSK_CALENDAR_ENTRY_TYPE,
+				TSK_LOCATION,
+				TSK_SHORTCUT,
+				TSK_DEVICE_NAME,
+				TSK_CATEGORY,
+				TSK_EMAIL_REPLYTO,
+				TSK_SERVER_NAME,
+				TSK_COUNT,
+				TSK_MIN_COUNT,
+				TSK_PATH_SOURCE,
+				TSK_PERMISSIONS,
+				TSK_ASSOCIATED_ARTIFACT,
+				TSK_ISDELETED,
+				TSK_GEO_LATITUDE_START,
+				TSK_GEO_LATITUDE_END,
+				TSK_GEO_LONGITUDE_START,
+				TSK_GEO_LONGITUDE_END,
+				TSK_READ_STATUS,
+				TSK_LOCAL_PATH,
+				TSK_REMOTE_PATH,
+				TSK_TEMP_DIR,
+				TSK_PRODUCT_ID,
+				TSK_OWNER,
+				TSK_ORGANIZATION,
+				TSK_CARD_NUMBER,
+				TSK_CARD_EXPIRATION,
+				TSK_CARD_SERVICE_CODE,
+				TSK_CARD_DISCRETIONARY,
+				TSK_CARD_LRC,
+				TSK_KEYWORD_SEARCH_DOCUMENT_ID,
+				TSK_CARD_SCHEME,
+				TSK_CARD_TYPE,
+				TSK_BRAND_NAME,
+				TSK_BANK_NAME,
+				TSK_COUNTRY,
+				TSK_CITY,
+				TSK_ACCOUNT_TYPE,
+				TSK_KEYWORD_SEARCH_TYPE,
+				TSK_HEADERS,
+				TSK_ID,
+				TSK_SSID,
+				TSK_BSSID,
+				TSK_MAC_ADDRESS,
+				TSK_IMEI,
+				TSK_IMSI,
+				TSK_ICCID,
+				TSK_THREAD_ID,
+				TSK_TL_EVENT_TYPE,
+				TSK_DATETIME_DELETED,
+				TSK_DATETIME_PASSWORD_RESET,
+				TSK_DATETIME_PASSWORD_FAIL,
+				TSK_DISPLAY_NAME,
+				TSK_PASSWORD_SETTINGS,
+				TSK_ACCOUNT_SETTINGS,
+				TSK_PASSWORD_HINT,
+				TSK_GROUPS,
+				TSK_ATTACHMENTS,
+				TSK_GEO_TRACKPOINTS,
+				TSK_GEO_WAYPOINTS,
+				TSK_DISTANCE_TRAVELED,
+				TSK_DISTANCE_FROM_HOMEPOINT,
+				TSK_HASH_PHOTODNA,
+				TSK_BYTES_SENT,
+				TSK_BYTES_RECEIVED,
+				TSK_LAST_PRINTED_DATETIME,
+				TSK_RULE,
+				TSK_ACTIVITY_TYPE,
+				TSK_GEO_AREAPOINTS,
+				TSK_REALM,
+				TSK_HOST,
+				TSK_HOME_DIR,
+				TSK_IS_ADMIN
+		));
+
 		private static final long serialVersionUID = 1L;
 		private final String typeName;
 		private final int typeID;
@@ -1443,7 +1524,12 @@ public enum ATTRIBUTE_TYPE {
 		TSK_HOST(154, "TSK_HOST",
 				bundle.getString("BlackboardAttribute.tskHost.text"),
 				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
-		;
+		TSK_HOME_DIR(155, "TSK_HOME_DIR",
+				bundle.getString("BlackboardAttribute.tskHomeDir.text"),
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
+		TSK_IS_ADMIN(156, "TSK_IS_ADMIN",
+				bundle.getString("BlackboardAttribute.tskIsAdmin.text"),
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER),;
 
 		private final int typeID;
 		private final String typeName;
@@ -1845,7 +1931,7 @@ String getContextString() {
 	 */
 	@Deprecated
 	public int getAttributeTypeID() {
-		return attributeType.getTypeID();
+		return getAttributeType().getTypeID();
 	}
 
 	/**
@@ -1859,7 +1945,7 @@ public int getAttributeTypeID() {
 	 */
 	@Deprecated
 	public String getAttributeTypeName() throws TskCoreException {
-		return attributeType.getTypeName();
+		return getAttributeType().getTypeName();
 	}
 
 	/**
@@ -1874,7 +1960,7 @@ public String getAttributeTypeName() throws TskCoreException {
 	 */
 	@Deprecated
 	public String getAttributeTypeDisplayName() throws TskCoreException {
-		return attributeType.getDisplayName();
+		return getAttributeType().getDisplayName();
 	}
 
 	/**
@@ -1887,7 +1973,7 @@ public String getAttributeTypeDisplayName() throws TskCoreException {
 	 */
 	@Deprecated
 	public String getModuleName() {
-		return sources;
+		return getSourcesCSV();
 	}
 
 }
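
Note for reviewers: the new TSK_HOME_DIR (155) and TSK_IS_ADMIN (156) types are consumed like any other standard BlackboardAttribute.Type. The snippet below is an illustrative sketch only and is not part of this patch; the module name and the osAccountArtifact variable are hypothetical, and it assumes the existing BlackboardAttribute(Type, String, ...) constructors and BlackboardArtifact.addAttribute() API.

    import org.sleuthkit.datamodel.BlackboardArtifact;
    import org.sleuthkit.datamodel.BlackboardAttribute;
    import org.sleuthkit.datamodel.TskCoreException;

    class OsAccountAttributeExample {
        // Attaches the new OS-account details to an existing artifact.
        static void addAccountDetails(BlackboardArtifact osAccountArtifact) throws TskCoreException {
            // TSK_HOME_DIR (155) is string-valued.
            osAccountArtifact.addAttribute(new BlackboardAttribute(
                    BlackboardAttribute.Type.TSK_HOME_DIR, "ExampleModule", "/home/jdoe"));
            // TSK_IS_ADMIN (156) is integer-valued: 1 = administrator, 0 = not.
            osAccountArtifact.addAttribute(new BlackboardAttribute(
                    BlackboardAttribute.Type.TSK_IS_ADMIN, "ExampleModule", 1));
        }
    }
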
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties b/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties
index 5c3cab6722aa84df1482c684994d6ea1034224a2..08f80085abc599e1a553eece27e8e4a5c1119486 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties
+++ b/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties
@@ -214,6 +214,8 @@ BlackboardAttribute.tskrule.text = Rule
 BlackboardAttribute.tskActivityType.text=Activity Type
 BlackboardAttribute.tskRealm.text=Realm
 BlackboardAttribute.tskHost.text=Host
+BlackboardAttribute.tskHomeDir.text=Home Directory
+BlackboardAttribute.tskIsAdmin.text=Is Administrator
 AbstractFile.readLocal.exception.msg4.text=Error reading local file\: {0}
 AbstractFile.readLocal.exception.msg1.text=Error reading local file, local path is not set
 AbstractFile.readLocal.exception.msg2.text=Error reading local file, it does not exist at local path\: {0}
@@ -314,12 +316,14 @@ IngestModuleInfo.IngestModuleType.DataSourceLevel.displayName=Data Source Level
 ReviewStatus.Approved=Approved
 ReviewStatus.Rejected=Rejected
 ReviewStatus.Undecided=Undecided
+CategoryType.DataArtifact=Data Artifact
+CategoryType.AnalysisResult=Analysis Result
 TimelineLevelOfDetail.low=Low
 TimelineLevelOfDetail.medium=Medium
 TimelineLevelOfDetail.high=High
 BaseTypes.fileSystem.name=File System
 BaseTypes.webActivity.name=Web Activity
-BaseTypes.miscTypes.name=Misc Types
+BaseTypes.miscTypes.name=Miscellaneous
 FileSystemTypes.fileModified.name=File Modified
 FileSystemTypes.fileAccessed.name=File Accessed
 FileSystemTypes.fileCreated.name=File Created
@@ -327,10 +331,12 @@ FileSystemTypes.fileChanged.name=File Changed
 MiscTypes.message.name=Messages
 MiscTypes.GPSRoutes.name=GPS Routes
 MiscTypes.GPSTrackpoint.name=GPS Trackpoint
-MiscTypes.Calls.name=Calls
-MiscTypes.Email.name=Email
+MiscTypes.Calls.name=Call Begin
+MiscTypes.CallsEnd.name=Call End
+MiscTypes.Email.name=Email Sent
+MiscTypes.EmailRcvd.name=Email Received
 MiscTypes.recentDocuments.name=Recent Documents
-MiscTypes.installedPrograms.name=Installed Programs
+MiscTypes.installedPrograms.name=Program Installed
 MiscTypes.exif.name=Exif
 MiscTypes.devicesAttached.name=Devices Attached
 MiscTypes.LogEntry.name=Log Entry
@@ -342,15 +348,21 @@ MiscTypes.GPSTrack.name=GPS Track
 MiscTypes.metadataLastPrinted.name=Document Last Printed
 MiscTypes.metadataLastSaved.name=Document Last Saved
 MiscTypes.metadataCreated.name=Document Created
-MiscTypes.programexecuted.name=Program Execution
+MiscTypes.programexecuted.name=Program Run
 RootEventType.eventTypes.name=Event Types
 WebTypes.webDownloads.name=Web Downloads
-WebTypes.webCookies.name=Web Cookies
+WebTypes.webCookies.name=Web Cookies Create
+WebTypes.webCookiesAccessed.name=Web Cookies Accessed
+WebTypes.webCookiesStart.name=Web Cookies Start
+WebTypes.webCookiesEnd.name=Web Cookies End
 WebTypes.webBookmarks.name=Web Bookmarks
-WebTypes.webHistory.name=Web History
+WebTypes.webHistory.name=Web History Accessed
+WebTypes.webHistoryCreated.name=Web History Created
 WebTypes.webSearch.name=Web Searches
-WebTypes.webFormAutoFill.name=Web Form Autofill
-WebTypes.webFormAddress.name=Web Form Address
+WebTypes.webFormAutoFill.name=Web Form Autofill Created
+WebTypes.webFormAddress.name=Web Form Address Created
+WebTypes.webFormAddressModified.name=Web Form Address Modified
+WebTypes.webFormAutofillAccessed.name=Web Form Autofill Accessed
 CustomTypes.other.name=Standard Types
 CustomTypes.userCreated.name=Custom Types
 BaseTypes.customTypes.name=Other
@@ -366,4 +378,55 @@ IntersectionFilter.displayName.text=Intersection
 tagsFilter.displayName.text=Must be tagged
 TextFilter.displayName.text=Must include text:
 TypeFilter.displayName.text=Limit event types to
-FileTypesFilter.displayName.text=Limit file types to
\ No newline at end of file
+FileTypesFilter.displayName.text=Limit file types to
+OsAccountStatus.Unknown.text=Unknown
+OsAccountStatus.Active.text=Active
+OsAccountStatus.Disabled.text=Disabled
+OsAccountStatus.Deleted.text=Deleted
+OsAccountType.Unknown.text=Unknown
+OsAccountType.Service.text=Service
+OsAccountType.Interactive.text=Interactive
+OsAccountInstanceType.Launched.text=Launched
+OsAccountInstanceType.Accessed.text=Accessed
+OsAccountInstanceType.Referenced.text=Referenced
+OsAccountInstanceType.Launched.descr.text=Account owner launched a program action on the host.
+OsAccountInstanceType.Accessed.descr.text=Account owner accessed resources on the host for read/write via some service.
+OsAccountInstanceType.Referenced.descr.text=Account owner was referenced in a log file on the host.
+OsAccountRealm.Known.text=Known
+OsAccountRealm.Inferred.text=Inferred
+OsAccountRealm.Unknown.text=Unknown
+OsAccountRealm.Local.text=Local
+OsAccountRealm.Domain.text=Domain
+Score.Priority.Normal.displayName.text=Normal
+Score.Priority.Override.displayName.text=Override
+Significance.Unknown.displayName.text=Unknown
+Significance.LikelyNone.displayName.text=Likely Not Notable
+Significance.LikelyNotable.displayName.text=Likely Notable
+Significance.None.displayName.text=Not Notable
+Significance.Notable.displayName.text=Notable
+TimelineEventType.BackupEventStart.txt=Backup Begin
+TimelineEventType.BackupEventEnd.txt=Backup End
+TimelineEventType.BackupEvent.description.start=Backup Begin
+TimelineEventType.BackupEvent.description.end=Backup End
+TimelineEventType.BluetoothPairingLastConnection.txt=Bluetooth Pairing Last Connection
+TimelineEventType.BluetoothPairing.txt=Bluetooth Pairing
+TimelineEventType.CalendarEntryStart.txt=Calendar Entry Begin
+TimelineEventType.CalendarEntryEnd.txt=Calendar Entry End
+TimelineEventType.DeletedProgram.txt=Program Deleted
+TimelineEventType.DeletedProgramDeleted.txt=Application Deleted
+TimelineEventType.OSAccountAccessed.txt=Operating System Account Accessed
+TimelineEventType.OSAccountCreated.txt=Operating System Account Created
+TimelineEventType.OSAccountPwdFail.txt=Operating System Account Password Fail
+TimelineEventType.OSAccountPwdReset.txt=Operating System Account Password Reset
+TimelineEventType.OSInfo.txt=Operating System Information
+TimelineEventType.ProgramNotification.txt=Program Notification
+TimelineEventType.ScreenShot.txt=Screen Shot
+TimelineEventType.UserDeviceEventStart.txt=User Activity Begin
+TimelineEventType.UserDeviceEventEnd.txt=User Activity End
+TimelineEventType.ServiceAccount.txt=Service Account
+TimelineEventType.WIFINetwork.txt=Wifi Network
+TimelineEventType.WebCache.text=Web Cache
+TimelineEventType.BluetoothAdapter.txt=Bluetooth Adapter
+BaseTypes.geolocation.name=Geolocation
+BaseTypes.communication.name=Communication
+
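
The Bundle.properties keys added above supply the display names that the new Type instances load via bundle.getString(...) in BlackboardAttribute.java, and that Bundle_ja.properties localizes further below. A minimal lookup sketch, assuming only that the bundle base name matches the package of Bundle.properties:

    import java.util.Locale;
    import java.util.ResourceBundle;

    class BundleLookupExample {
        public static void main(String[] args) {
            // Resolves org/sleuthkit/datamodel/Bundle.properties from the classpath;
            // a Japanese default locale would pick up Bundle_ja.properties instead.
            ResourceBundle bundle = ResourceBundle.getBundle(
                    "org.sleuthkit.datamodel.Bundle", Locale.getDefault());
            System.out.println(bundle.getString("BlackboardAttribute.tskIsAdmin.text"));  // Is Administrator
            System.out.println(bundle.getString("BlackboardAttribute.tskHomeDir.text"));  // Home Directory
        }
    }
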
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties-MERGED b/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties-MERGED
index 5c3cab6722aa84df1482c684994d6ea1034224a2..08f80085abc599e1a553eece27e8e4a5c1119486 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties-MERGED
+++ b/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties-MERGED
@@ -214,6 +214,8 @@ BlackboardAttribute.tskrule.text = Rule
 BlackboardAttribute.tskActivityType.text=Activity Type
 BlackboardAttribute.tskRealm.text=Realm
 BlackboardAttribute.tskHost.text=Host
+BlackboardAttribute.tskHomeDir.text=Home Directory
+BlackboardAttribute.tskIsAdmin.text=Is Administrator
 AbstractFile.readLocal.exception.msg4.text=Error reading local file\: {0}
 AbstractFile.readLocal.exception.msg1.text=Error reading local file, local path is not set
 AbstractFile.readLocal.exception.msg2.text=Error reading local file, it does not exist at local path\: {0}
@@ -314,12 +316,14 @@ IngestModuleInfo.IngestModuleType.DataSourceLevel.displayName=Data Source Level
 ReviewStatus.Approved=Approved
 ReviewStatus.Rejected=Rejected
 ReviewStatus.Undecided=Undecided
+CategoryType.DataArtifact=Data Artifact
+CategoryType.AnalysisResult=Analysis Result
 TimelineLevelOfDetail.low=Low
 TimelineLevelOfDetail.medium=Medium
 TimelineLevelOfDetail.high=High
 BaseTypes.fileSystem.name=File System
 BaseTypes.webActivity.name=Web Activity
-BaseTypes.miscTypes.name=Misc Types
+BaseTypes.miscTypes.name=Miscellaneous
 FileSystemTypes.fileModified.name=File Modified
 FileSystemTypes.fileAccessed.name=File Accessed
 FileSystemTypes.fileCreated.name=File Created
@@ -327,10 +331,12 @@ FileSystemTypes.fileChanged.name=File Changed
 MiscTypes.message.name=Messages
 MiscTypes.GPSRoutes.name=GPS Routes
 MiscTypes.GPSTrackpoint.name=GPS Trackpoint
-MiscTypes.Calls.name=Calls
-MiscTypes.Email.name=Email
+MiscTypes.Calls.name=Call Begin
+MiscTypes.CallsEnd.name=Call End
+MiscTypes.Email.name=Email Sent
+MiscTypes.EmailRcvd.name=Email Received
 MiscTypes.recentDocuments.name=Recent Documents
-MiscTypes.installedPrograms.name=Installed Programs
+MiscTypes.installedPrograms.name=Program Installed
 MiscTypes.exif.name=Exif
 MiscTypes.devicesAttached.name=Devices Attached
 MiscTypes.LogEntry.name=Log Entry
@@ -342,15 +348,21 @@ MiscTypes.GPSTrack.name=GPS Track
 MiscTypes.metadataLastPrinted.name=Document Last Printed
 MiscTypes.metadataLastSaved.name=Document Last Saved
 MiscTypes.metadataCreated.name=Document Created
-MiscTypes.programexecuted.name=Program Execution
+MiscTypes.programexecuted.name=Program Run
 RootEventType.eventTypes.name=Event Types
 WebTypes.webDownloads.name=Web Downloads
-WebTypes.webCookies.name=Web Cookies
+WebTypes.webCookies.name=Web Cookies Create
+WebTypes.webCookiesAccessed.name=Web Cookies Accessed
+WebTypes.webCookiesStart.name=Web Cookies Start
+WebTypes.webCookiesEnd.name=Web Cookies End
 WebTypes.webBookmarks.name=Web Bookmarks
-WebTypes.webHistory.name=Web History
+WebTypes.webHistory.name=Web History Accessed
+WebTypes.webHistoryCreated.name=Web History Created
 WebTypes.webSearch.name=Web Searches
-WebTypes.webFormAutoFill.name=Web Form Autofill
-WebTypes.webFormAddress.name=Web Form Address
+WebTypes.webFormAutoFill.name=Web Form Autofill Created
+WebTypes.webFormAddress.name=Web Form Address Created
+WebTypes.webFormAddressModified.name=Web Form Address Modified
+WebTypes.webFormAutofillAccessed.name=Web Form Autofill Accessed
 CustomTypes.other.name=Standard Types
 CustomTypes.userCreated.name=Custom Types
 BaseTypes.customTypes.name=Other
@@ -366,4 +378,55 @@ IntersectionFilter.displayName.text=Intersection
 tagsFilter.displayName.text=Must be tagged
 TextFilter.displayName.text=Must include text:
 TypeFilter.displayName.text=Limit event types to
-FileTypesFilter.displayName.text=Limit file types to
\ No newline at end of file
+FileTypesFilter.displayName.text=Limit file types to
+OsAccountStatus.Unknown.text=Unknown
+OsAccountStatus.Active.text=Active
+OsAccountStatus.Disabled.text=Disabled
+OsAccountStatus.Deleted.text=Deleted
+OsAccountType.Unknown.text=Unknown
+OsAccountType.Service.text=Service
+OsAccountType.Interactive.text=Interactive
+OsAccountInstanceType.Launched.text=Launched
+OsAccountInstanceType.Accessed.text=Accessed
+OsAccountInstanceType.Referenced.text=Referenced
+OsAccountInstanceType.Launched.descr.text=Account owner launched a program action on the host.
+OsAccountInstanceType.Accessed.descr.text=Account owner accessed resources on the host for read/write via some service.
+OsAccountInstanceType.Referenced.descr.text=Account owner was referenced in a log file on the host.
+OsAccountRealm.Known.text=Known
+OsAccountRealm.Inferred.text=Inferred
+OsAccountRealm.Unknown.text=Unknown
+OsAccountRealm.Local.text=Local
+OsAccountRealm.Domain.text=Domain
+Score.Priority.Normal.displayName.text=Normal
+Score.Priority.Override.displayName.text=Override
+Significance.Unknown.displayName.text=Unknown
+Significance.LikelyNone.displayName.text=Likely Not Notable
+Significance.LikelyNotable.displayName.text=Likely Notable
+Significance.None.displayName.text=Not Notable
+Significance.Notable.displayName.text=Notable
+TimelineEventType.BackupEventStart.txt=Backup Begin
+TimelineEventType.BackupEventEnd.txt=Backup End
+TimelineEventType.BackupEvent.description.start=Backup Begin
+TimelineEventType.BackupEvent.description.end=Backup End
+TimelineEventType.BluetoothPairingLastConnection.txt=Bluetooth Pairing Last Connection
+TimelineEventType.BluetoothPairing.txt=Bluetooth Pairing
+TimelineEventType.CalendarEntryStart.txt=Calendar Entry Begin
+TimelineEventType.CalendarEntryEnd.txt=Calendar Entry End
+TimelineEventType.DeletedProgram.txt=Program Deleted
+TimelineEventType.DeletedProgramDeleted.txt=Application Deleted
+TimelineEventType.OSAccountAccessed.txt=Operating System Account Accessed
+TimelineEventType.OSAccountCreated.txt=Operating System Account Created
+TimelineEventType.OSAccountPwdFail.txt=Operating System Account Password Fail
+TimelineEventType.OSAccountPwdReset.txt=Operating System Account Password Reset
+TimelineEventType.OSInfo.txt=Operating System Information
+TimelineEventType.ProgramNotification.txt=Program Notification
+TimelineEventType.ScreenShot.txt=Screen Shot
+TimelineEventType.UserDeviceEventStart.txt=User Activity Begin
+TimelineEventType.UserDeviceEventEnd.txt=User Activity End
+TimelineEventType.ServiceAccount.txt=Service Account
+TimelineEventType.WIFINetwork.txt=Wifi Network
+TimelineEventType.WebCache.text=Web Cache
+TimelineEventType.BluetoothAdapter.txt=Bluetooth Adapter
+BaseTypes.geolocation.name=Geolocation
+BaseTypes.communication.name=Communication
+
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Bundle_ja.properties b/bindings/java/src/org/sleuthkit/datamodel/Bundle_ja.properties
index acef59d809f82431d8060becd932004c6cc95eed..164f2f3cfbc2e60e4f74f68826d777f6377cff4d 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Bundle_ja.properties
+++ b/bindings/java/src/org/sleuthkit/datamodel/Bundle_ja.properties
@@ -1,12 +1,14 @@
-#Tue Feb 16 13:50:31 UTC 2021
+#Thu Jul 01 12:01:30 UTC 2021
 AbstractFile.readLocal.exception.msg1.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30ed\u30fc\u30ab\u30eb\u30d1\u30b9\u304c\u30bb\u30c3\u30c8\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002
 AbstractFile.readLocal.exception.msg2.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u4e0b\u8a18\u306e\u30ed\u30fc\u30ab\u30eb\u30d1\u30b9\u306b\u306f\u5b58\u5728\u3057\u307e\u305b\u3093\uff1a{0}
 AbstractFile.readLocal.exception.msg3.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u4e0b\u8a18\u306e\u30ed\u30fc\u30ab\u30eb\u30d1\u30b9\u3067\u306f\u8aad\u307f\u53d6\u308a\u3067\u304d\u307e\u305b\u3093\uff1a{0}
 AbstractFile.readLocal.exception.msg4.text=\u30d5\u30a1\u30a4\u30eb{0}\u306e\u8aad\u307f\u53d6\u308a\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f
 AbstractFile.readLocal.exception.msg5.text=\u30ed\u30fc\u30ab\u30eb\u30d5\u30a1\u30a4\u30eb{0}\u3092\u8aad\u307f\u53d6\u308c\u307e\u305b\u3093
+BaseTypes.communication.name=\u30b3\u30df\u30e5\u30cb\u30b1\u30fc\u30b7\u30e7\u30f3
 BaseTypes.customTypes.name=\u305d\u306e\u4ed6
 BaseTypes.fileSystem.name=\u30d5\u30a1\u30a4\u30eb\u30b7\u30b9\u30c6\u30e0
-BaseTypes.miscTypes.name=\u305d\u306e\u4ed6\u306e\u30bf\u30a4\u30d7
+BaseTypes.geolocation.name=\u30b8\u30aa\u30ed\u30b1\u30fc\u30b7\u30e7\u30f3
+BaseTypes.miscTypes.name=\u305d\u306e\u4ed6
 BaseTypes.webActivity.name=Web\u30a2\u30af\u30c6\u30a3\u30d3\u30c6\u30a3
 BlackboardArtifact.shortDescriptionDate.text=\u3067{0}
 BlackboardArtifact.tagFile.text=\u30bf\u30b0\u4ed8\u3051\u3055\u308c\u305f\u30d5\u30a1\u30a4\u30eb
@@ -145,6 +147,7 @@ BlackboardAttribute.tskHashSha225.text=SHA2-256\u30cf\u30c3\u30b7\u30e5
 BlackboardAttribute.tskHashSha2512.text=SHA2-512\u30cf\u30c3\u30b7\u30e5
 BlackboardAttribute.tskHashsetName.text=\u30cf\u30c3\u30b7\u30e5\u30bb\u30c3\u30c8\u540d
 BlackboardAttribute.tskHeaders.text=\u30d8\u30c3\u30c0\u30fc
+BlackboardAttribute.tskHomeDir.text=\u30db\u30fc\u30e0\u30c7\u30a3\u30ec\u30af\u30c8\u30ea
 BlackboardAttribute.tskHost.text=\u30db\u30b9\u30c8
 BlackboardAttribute.tskIccid.text=ICCID
 BlackboardAttribute.tskId.text=ID
@@ -152,6 +155,7 @@ BlackboardAttribute.tskImei.text=IMEI
 BlackboardAttribute.tskImsi.text=IMSI
 BlackboardAttribute.tskInterestingFile.text=\u7591\u308f\u3057\u3044\u30d5\u30a1\u30a4\u30eb
 BlackboardAttribute.tskIpAddress.text=IP\u30a2\u30c9\u30ec\u30b9
+BlackboardAttribute.tskIsAdmin.text=\u7ba1\u7406\u8005\u3067\u3059
 BlackboardAttribute.tskIsDeleted.text=\u306f\u524a\u9664\u3055\u308c\u307e\u3057\u305f
 BlackboardAttribute.tskKeyword.text=\u30ad\u30fc\u30ef\u30fc\u30c9
 BlackboardAttribute.tskKeywordPreview.text=\u30ad\u30fc\u30ef\u30fc\u30c9\u30d7\u30ec\u30d3\u30e5\u30fc
@@ -230,6 +234,8 @@ BlackboardAttribute.tskpasswordhint.text=\u30d1\u30b9\u30ef\u30fc\u30c9\u306e\u3
 BlackboardAttribute.tskpasswordsettings.text=\u30d1\u30b9\u30ef\u30fc\u30c9\u8a2d\u5b9a
 BlackboardAttribute.tskrule.text=\u30eb\u30fc\u30eb
 BlackboardAttribute.tskthreadid.text=\u30b9\u30ec\u30c3\u30c9ID
+CategoryType.AnalysisResult=\u5206\u6790\u7d50\u679c
+CategoryType.DataArtifact=\u30c7\u30fc\u30bf\u30a2\u30fc\u30c6\u30a3\u30d5\u30a1\u30af\u30c8
 CustomTypes.other.name=\u6a19\u6e96\u30bf\u30a4\u30d7
 CustomTypes.userCreated.name=\u30ab\u30b9\u30bf\u30e0\u30bf\u30a4\u30d7
 DataSourcesFilter.displayName.text=\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u3092\u306b\u5236\u9650\u3059\u308b
@@ -276,8 +282,10 @@ IngestJobInfo.IngestJobStatusType.Started.displayName=\u958b\u59cb
 IngestModuleInfo.IngestModuleType.DataSourceLevel.displayName=\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u30ec\u30d9\u30eb
 IngestModuleInfo.IngestModuleType.FileLevel.displayName=\u30d5\u30a1\u30a4\u30eb\u30ec\u30d9\u30eb
 IntersectionFilter.displayName.text=\u4ea4\u5dee\u70b9
-MiscTypes.Calls.name=\u30b3\u30fc\u30eb
-MiscTypes.Email.name=E\u30e1\u30fc\u30eb
+MiscTypes.Calls.name=\u901a\u8a71\u958b\u59cb
+MiscTypes.CallsEnd.name=\u901a\u8a71\u7d42\u4e86
+MiscTypes.Email.name=\u9001\u4fe1\u3055\u308c\u305f\u96fb\u5b50\u30e1\u30fc\u30eb
+MiscTypes.EmailRcvd.name=\u30e1\u30fc\u30eb\u306e\u53d7\u4fe1
 MiscTypes.GPSBookmark.name=GPS\u30d6\u30c3\u30af\u30de\u30fc\u30af
 MiscTypes.GPSLastknown.name=GPS\u8a18\u9332\u306e\u6700\u5f8c\u5834\u6240
 MiscTypes.GPSRoutes.name=GPS\u30eb\u30fc\u30c8
@@ -288,17 +296,42 @@ MiscTypes.LogEntry.name=\u30ed\u30b0\u767b\u9332
 MiscTypes.Registry.name=\u30ec\u30b8\u30b9\u30c8\u30ea
 MiscTypes.devicesAttached.name=\u63a5\u7d9a\u3055\u308c\u3066\u3044\u308b\u30c7\u30d0\u30a4\u30b9
 MiscTypes.exif.name=Exif
-MiscTypes.installedPrograms.name=\u30a4\u30f3\u30b9\u30c8\u30fc\u30eb\u3055\u308c\u3066\u3044\u308b\u30d7\u30ed\u30b0\u30e9\u30e0
+MiscTypes.installedPrograms.name=\u30a4\u30f3\u30b9\u30c8\u30fc\u30eb\u6e08\u307f\u306e\u30d7\u30ed\u30b0\u30e9\u30e0
 MiscTypes.message.name=\u30e1\u30c3\u30bb\u30fc\u30b8
 MiscTypes.metadataCreated.name=\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8\u4f5c\u6210
 MiscTypes.metadataLastPrinted.name=\u6700\u7d42\u5370\u5237\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8
 MiscTypes.metadataLastSaved.name=\u6700\u5f8c\u306b\u4fdd\u5b58\u3055\u308c\u305f\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8
 MiscTypes.programexecuted.name=\u30d7\u30ed\u30b0\u30e9\u30e0\u306e\u5b9f\u884c
 MiscTypes.recentDocuments.name=\u6700\u8fd1\u306e\u30c9\u30ad\u30e5\u30e1\u30f3\u30c8
+OsAccountInstanceType.Accessed.descr.text=\u30a2\u30ab\u30a6\u30f3\u30c8\u6240\u6709\u8005\u306f\u3001\u4f55\u3089\u304b\u306e\u30b5\u30fc\u30d3\u30b9\u3092\u4ecb\u3057\u3066\u8aad\u307f/\u66f8\u304d\u306b\u30db\u30b9\u30c8\u4e0a\u306e\u30ea\u30bd\u30fc\u30b9\u306b\u30a2\u30af\u30bb\u30b9\u3057\u307e\u3057\u305f\u3002
+OsAccountInstanceType.Accessed.text=\u30a2\u30af\u30bb\u30b9\u6e08\u307f
+OsAccountInstanceType.Launched.descr.text=\u30a2\u30ab\u30a6\u30f3\u30c8\u6240\u6709\u8005\u306f\u3001\u30db\u30b9\u30c8\u3067\u30d7\u30ed\u30b0\u30e9\u30e0\u306e\u30a2\u30af\u30b7\u30e7\u30f3\u3092\u958b\u59cb\u3057\u307e\u3057\u305f\u3002
+OsAccountInstanceType.Launched.text=\u4f5c\u52d5
+OsAccountInstanceType.Referenced.descr.text=\u30a2\u30ab\u30a6\u30f3\u30c8\u6240\u6709\u8005\u306f\u3001\u30db\u30b9\u30c8\u4e0a\u306e\u30ed\u30b0\u30d5\u30a1\u30a4\u30eb\u3067\u53c2\u7167\u3055\u308c\u307e\u3057\u305f\u3002
+OsAccountInstanceType.Referenced.text=\u53c2\u7167
+OsAccountRealm.Domain.text=\u30c9\u30e1\u30a4\u30f3
+OsAccountRealm.Inferred.text=\u63a8\u6e2c
+OsAccountRealm.Known.text=\u65e2\u77e5
+OsAccountRealm.Local.text=\u30ed\u30fc\u30ab\u30eb
+OsAccountRealm.Unknown.text=\u4e0d\u660e
+OsAccountStatus.Active.text=\u30a2\u30af\u30c6\u30a3\u30d6
+OsAccountStatus.Deleted.text=\u524a\u9664\u6e08\u307f
+OsAccountStatus.Disabled.text=\u7121\u52b9
+OsAccountStatus.Unknown.text=\u4e0d\u660e
+OsAccountType.Interactive.text=\u30a4\u30f3\u30bf\u30e9\u30af\u30c6\u30a3\u30d6
+OsAccountType.Service.text=\u30b5\u30fc\u30d3\u30b9
+OsAccountType.Unknown.text=\u4e0d\u660e
 ReviewStatus.Approved=\u627f\u8a8d\u6e08\u307f
 ReviewStatus.Rejected=\u62d2\u5426\u3055\u308c\u307e\u3057\u305f
 ReviewStatus.Undecided=\u672a\u5b9a
 RootEventType.eventTypes.name=\u30a4\u30d9\u30f3\u30c8\u30bf\u30a4\u30d7
+Score.Priority.Normal.displayName.text=\u6b63\u5e38
+Score.Priority.Override.displayName.text=\u30aa\u30fc\u30d0\u30fc\u30e9\u30a4\u30c9
+Significance.LikelyNone.displayName.text=\u304a\u305d\u3089\u304f\u6ce8\u76ee\u306b\u5024\u3057\u306a\u3044
+Significance.LikelyNotable.displayName.text=\u304a\u305d\u3089\u304f\u6ce8\u76ee\u306b\u5024\u3059\u308b
+Significance.None.displayName.text=\u6ce8\u76ee\u306b\u5024\u3057\u306a\u3044
+Significance.Notable.displayName.text=\u6ce8\u76ee\u3059\u3079\u304d
+Significance.Unknown.displayName.text=\u4e0d\u660e
 SlackFile.readInt.err.msg.text=\u753b\u50cf\u30d5\u30a1\u30a4\u30eb\u304c\u5b58\u5728\u3057\u306a\u3044\u304b\u3001\u30a2\u30af\u30bb\u30b9\u3067\u304d\u307e\u305b\u3093\u3002
 SleuthkitCase.SchemaVersionMismatch=\u30b9\u30ad\u30fc\u30de\u306e\u30d0\u30fc\u30b8\u30e7\u30f3\u304c\u4e00\u81f4\u3057\u307e\u305b\u3093
 SleuthkitCase.addDerivedFile.exception.msg1.text=\u6d3e\u751f\u30d5\u30a1\u30a4\u30eb\u306e\u4f5c\u6210\u4e2d\u306b\u30a8\u30e9\u30fc\u304c\u767a\u751f\u3057\u307e\u3057\u305f\u3002\u30aa\u30d6\u30b8\u30a7\u30af\u30c8\u306e\u65b0\u898fID\u3092\u53d6\u5f97\u3067\u304d\u307e\u305b\u3093\u3002\u30d5\u30a1\u30a4\u30eb\u540d\uff1a{0}
@@ -316,6 +349,29 @@ SleuthkitCase.getLastObjectId.exception.msg.text=\u6700\u5f8c\u306e\u30aa\u30d6\
 SleuthkitCase.isFileFromSource.exception.msg.text=\u30a8\u30e9\u30fc\uff1a\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306f\u30da\u30a2\u30ec\u30f3\u30c8\u304c\u7121\u3044\uff08\u30a4\u30e1\u30fc\u30b8\u3001\u30d5\u30a1\u30a4\u30eb\u30bb\u30c3\u30c8\uff09\u306f\u305a\u3067\u3059\u304c\u3001\u4e0b\u8a18\u304c\u5b58\u5728\u3057\u307e\u3059\uff1a{0}
 SleuthkitCase.isFileFromSource.exception.msg2.text=\u30a8\u30e9\u30fc\uff1a\u30c7\u30fc\u30bf\u30bd\u30fc\u30b9\u306f\u30a4\u30e1\u30fc\u30b8\u307e\u305f\u306fVirtualDirectory\u3067\u3042\u308b\u3079\u304d\u3067\u3059\u304c\u3001\u4e0b\u8a18\u304c\u5b58\u5728\u3057\u307e\u3059\uff1a{0}
 TextFilter.displayName.text=\u30c6\u30ad\u30b9\u30c8\u3092\u542b\u3081\u308b\u5fc5\u8981\u304c\u3042\u308a\u307e\u3059\uff1a
+TimelineEventType.BackupEvent.description.end=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u7d42\u4e86
+TimelineEventType.BackupEvent.description.start=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u958b\u59cb
+TimelineEventType.BackupEventEnd.txt=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u7d42\u4e86
+TimelineEventType.BackupEventStart.txt=\u30d0\u30c3\u30af\u30a2\u30c3\u30d7\u958b\u59cb
+TimelineEventType.BluetoothAdapter.txt=Bluetooth\u30a2\u30c0\u30d7\u30bf\u30fc
+TimelineEventType.BluetoothPairing.txt=Bluetooth\u30da\u30a2\u30ea\u30f3\u30b0
+TimelineEventType.BluetoothPairingLastConnection.txt=\u6700\u5f8c\u306e\u63a5\u7d9a\u3092Bluetooth\u3067\u30da\u30a2\u30ea\u30f3\u30b0
+TimelineEventType.CalendarEntryEnd.txt=\u30ab\u30ec\u30f3\u30c0\u30fc\u5165\u529b\u7d42\u4e86
+TimelineEventType.CalendarEntryStart.txt=\u30ab\u30ec\u30f3\u30c0\u30fc\u5165\u529b\u958b\u59cb
+TimelineEventType.DeletedProgram.txt=\u30d7\u30ed\u30b0\u30e9\u30e0\u304c\u524a\u9664\u3055\u308c\u307e\u3057\u305f
+TimelineEventType.DeletedProgramDeleted.txt=\u30a2\u30d7\u30ea\u30b1\u30fc\u30b7\u30e7\u30f3\u304c\u524a\u9664\u3055\u308c\u307e\u3057\u305f
+TimelineEventType.OSAccountAccessed.txt=\u30a2\u30af\u30bb\u30b9\u3055\u308c\u305fOS\u30a2\u30ab\u30a6\u30f3\u30c8
+TimelineEventType.OSAccountCreated.txt=OS\u30a2\u30ab\u30a6\u30f3\u30c8\u304c\u4f5c\u6210\u3055\u308c\u307e\u3057\u305f
+TimelineEventType.OSAccountPwdFail.txt=OS\u30a2\u30ab\u30a6\u30f3\u30c8\u306e\u30d1\u30b9\u30ef\u30fc\u30c9\u30fb\u30a8\u30e9\u30fc
+TimelineEventType.OSAccountPwdReset.txt=OS\u30a2\u30ab\u30a6\u30f3\u30c8\u306e\u30d1\u30b9\u30ef\u30fc\u30c9\u306e\u30ea\u30bb\u30c3\u30c8
+TimelineEventType.OSInfo.txt=\u30aa\u30da\u30ec\u30fc\u30c6\u30a3\u30f3\u30b0\u30b7\u30b9\u30c6\u30e0\u60c5\u5831
+TimelineEventType.ProgramNotification.txt=\u30d7\u30ed\u30b0\u30e9\u30e0\u901a\u77e5
+TimelineEventType.ScreenShot.txt=\u30b9\u30af\u30ea\u30fc\u30f3\u30b7\u30e7\u30c3\u30c8
+TimelineEventType.ServiceAccount.txt=\u30b5\u30fc\u30d3\u30b9\u30fb\u30a2\u30ab\u30a6\u30f3\u30c8
+TimelineEventType.UserDeviceEventEnd.txt=\u30e6\u30fc\u30b6\u30fc\u6d3b\u52d5\u7d42\u4e86
+TimelineEventType.UserDeviceEventStart.txt=\u30e6\u30fc\u30b6\u30fc\u6d3b\u52d5\u958b\u59cb
+TimelineEventType.WIFINetwork.txt=Wifi\u30cd\u30c3\u30c8\u30ef\u30fc\u30af
+TimelineEventType.WebCache.text=Web\u30ad\u30e3\u30c3\u30b7\u30e5
 TimelineLevelOfDetail.high=\u9ad8
 TimelineLevelOfDetail.low=\u4f4e
 TimelineLevelOfDetail.medium=\u4e2d
@@ -365,11 +421,17 @@ Volume.read.exception.msg1.text=\u3053\u306e\u30dc\u30ea\u30e5\u30fc\u30e0\u306e
 Volume.vsFlagToString.allocated=\u5272\u308a\u5f53\u3066\u6e08\u307f
 Volume.vsFlagToString.unallocated=\u672a\u5272\u308a\u5f53\u3066
 WebTypes.webBookmarks.name=Web\u30d6\u30c3\u30af\u30de\u30fc\u30af
-WebTypes.webCookies.name=Web\u30af\u30c3\u30ad\u30fc
+WebTypes.webCookies.name=WebCookie\u3092\u4f5c\u6210
+WebTypes.webCookiesAccessed.name=\u30a2\u30af\u30bb\u30b9\u3055\u308c\u305fWebCookie
+WebTypes.webCookiesEnd.name=WebCookie\u304c\u7d42\u4e86
+WebTypes.webCookiesStart.name=WebCookie\u306e\u958b\u59cb
 WebTypes.webDownloads.name=Web\u30c0\u30a6\u30f3\u30ed\u30fc\u30c9
-WebTypes.webFormAddress.name=Web\u30d5\u30a9\u30fc\u30e0\u30a2\u30c9\u30ec\u30b9
-WebTypes.webFormAutoFill.name=Web\u30d5\u30a9\u30fc\u30e0\u306e\u81ea\u52d5\u5165\u529b
-WebTypes.webHistory.name=Web\u5c65\u6b74
+WebTypes.webFormAddress.name=Web\u30d5\u30a9\u30fc\u30e0\u30a2\u30c9\u30ec\u30b9\u304c\u4f5c\u6210\u3055\u308c\u307e\u3057\u305f
+WebTypes.webFormAddressModified.name=Web\u30d5\u30a9\u30fc\u30e0\u30a2\u30c9\u30ec\u30b9\u304c\u5909\u66f4\u3055\u308c\u307e\u3057\u305f
+WebTypes.webFormAutoFill.name=Web\u30d5\u30a9\u30fc\u30e0\u306e\u81ea\u52d5\u5165\u529b\u304c\u4f5c\u6210\u3055\u308c\u307e\u3057\u305f
+WebTypes.webFormAutofillAccessed.name=Web\u30d5\u30a9\u30fc\u30e0\u306e\u81ea\u52d5\u5165\u529b\u306b\u30a2\u30af\u30bb\u30b9
+WebTypes.webHistory.name=\u30a2\u30af\u30bb\u30b9\u3055\u308c\u305fWeb\u5c65\u6b74
+WebTypes.webHistoryCreated.name=Web\u5c65\u6b74\u304c\u4f5c\u6210\u3055\u308c\u307e\u3057\u305f
 WebTypes.webSearch.name=Web\u691c\u7d22
 ZoomSettingsPane.descrLODLabel.text=\u8a73\u7d30\u8aac\u660e\uff1a
 ZoomSettingsPane.historyLabel.text=\u5c65\u6b74\uff1a
diff --git a/bindings/java/src/org/sleuthkit/datamodel/CaseDatabaseFactory.java b/bindings/java/src/org/sleuthkit/datamodel/CaseDatabaseFactory.java
index eca9630950c463208eac5a2b6e5c4367ecebbc3f..e6c08af852c8b49b830ef902e4d9cc853f9a4868 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/CaseDatabaseFactory.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/CaseDatabaseFactory.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2020 Basis Technology Corp.
+ * Copyright 2020-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -145,21 +145,30 @@ private void addDbInfo(Connection conn) throws TskCoreException {
 	 */
 	private void addTables(Connection conn) throws TskCoreException {
 		try (Statement stmt = conn.createStatement()) {
+			createTskObjects(stmt);
+			createHostTables(stmt);
+			createAccountTables(stmt);
 			createFileTables(stmt);
 			createArtifactTables(stmt);
+			createAnalysisResultsTables(stmt);
 			createTagTables(stmt);
 			createIngestTables(stmt);
-			createAccountTables(stmt);
 			createEventTables(stmt);
+			createAttributeTables(stmt);
+			createAccountInstancesAndArtifacts(stmt);
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error initializing tables", ex);
 		}
 	}
 	
-	private void createFileTables(Statement stmt) throws SQLException {
+	// tsk_objects is referenced by many other tables and should be created first
+	private void createTskObjects(Statement stmt) throws SQLException {
 		// The UNIQUE here on the object ID is to create an index
 		stmt.execute("CREATE TABLE tsk_objects (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, par_obj_id " + dbQueryHelper.getBigIntType() 
 				+ ", type INTEGER NOT NULL, UNIQUE (obj_id), FOREIGN KEY (par_obj_id) REFERENCES tsk_objects (obj_id) ON DELETE CASCADE)");
+	}
+	
+	private void createFileTables(Statement stmt) throws SQLException {
 
 		stmt.execute("CREATE TABLE tsk_image_info (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, type INTEGER, ssize INTEGER, " 
 				+ "tzone TEXT, size " + dbQueryHelper.getBigIntType() + ", md5 TEXT, sha1 TEXT, sha256 TEXT, display_name TEXT, "
@@ -182,7 +191,11 @@ private void createFileTables(Statement stmt) throws SQLException {
 				+ "pool_type INTEGER NOT NULL, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE);");
 
 		stmt.execute("CREATE TABLE data_source_info (obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, device_id TEXT NOT NULL, "
-				+ "time_zone TEXT NOT NULL, acquisition_details TEXT, FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)");
+				+ "time_zone TEXT NOT NULL, acquisition_details TEXT, added_date_time "+ dbQueryHelper.getBigIntType() + ", "
+				+ "acquisition_tool_settings TEXT, acquisition_tool_name TEXT, acquisition_tool_version TEXT, "
+				+ "host_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id), "
+				+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)");
 
 		stmt.execute("CREATE TABLE tsk_fs_info (obj_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
 				+ "data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
@@ -205,9 +218,12 @@ private void createFileTables(Statement stmt) throws SQLException {
 				+ "mtime " + dbQueryHelper.getBigIntType() + ", mode INTEGER, uid INTEGER, gid INTEGER, md5 TEXT, sha256 TEXT, "
 				+ "known INTEGER, "
 				+ "parent_path TEXT, mime_type TEXT, extension TEXT, "
+				+ "owner_uid TEXT DEFAULT NULL, "
+				+ "os_account_obj_id " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, "
 				+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
 				+ "FOREIGN KEY(fs_obj_id) REFERENCES tsk_fs_info(obj_id) ON DELETE CASCADE, "
-				+ "FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id) ON DELETE CASCADE)");
+				+ "FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE SET NULL) " ); 
 
 		stmt.execute("CREATE TABLE file_encoding_types (encoding_type INTEGER PRIMARY KEY, name TEXT NOT NULL)");
 
@@ -233,7 +249,8 @@ private void createFileTables(Statement stmt) throws SQLException {
 	
 	private void createArtifactTables(Statement stmt) throws SQLException {
 		stmt.execute("CREATE TABLE blackboard_artifact_types (artifact_type_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
-				+ "type_name TEXT NOT NULL, display_name TEXT)");
+				+ "type_name TEXT NOT NULL, display_name TEXT,"
+				+ "category_type INTEGER DEFAULT 0)");
 
 		stmt.execute("CREATE TABLE blackboard_attribute_types (attribute_type_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
 				+ "type_name TEXT NOT NULL, display_name TEXT, value_type INTEGER NOT NULL)");
@@ -245,9 +262,10 @@ private void createArtifactTables(Statement stmt) throws SQLException {
 		stmt.execute("CREATE TABLE blackboard_artifacts (artifact_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
 				+ "obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
 				+ "artifact_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
-				+ "data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "data_source_obj_id " + dbQueryHelper.getBigIntType() + ", "
 				+ "artifact_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
 				+ "review_status_id INTEGER NOT NULL, "
+				+ "UNIQUE (artifact_obj_id),"
 				+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
 				+ "FOREIGN KEY(artifact_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
 				+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
@@ -265,9 +283,29 @@ private void createArtifactTables(Statement stmt) throws SQLException {
 				+ "value_text TEXT, value_int32 INTEGER, value_int64 " + dbQueryHelper.getBigIntType() + ", value_double NUMERIC(20, 10), "
 				+ "FOREIGN KEY(artifact_id) REFERENCES blackboard_artifacts(artifact_id) ON DELETE CASCADE, "
 				+ "FOREIGN KEY(artifact_type_id) REFERENCES blackboard_artifact_types(artifact_type_id), "
-				+ "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))");		
+				+ "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))");	
 	}
 	
+	private void createAnalysisResultsTables(Statement stmt) throws SQLException  {
+		stmt.execute("CREATE TABLE tsk_analysis_results (artifact_obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, "
+				+ "conclusion TEXT, "
+				+ "significance INTEGER NOT NULL, "
+				+ "priority INTEGER NOT NULL, "
+				+ "configuration TEXT, justification TEXT, "
+				+ "ignore_score INTEGER DEFAULT 0, " // boolean	
+				+ "FOREIGN KEY(artifact_obj_id) REFERENCES blackboard_artifacts(artifact_obj_id) ON DELETE CASCADE"
+				+ ")");		
+		
+		stmt.execute("CREATE TABLE tsk_aggregate_score( obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, "
+				+ "data_source_obj_id " + dbQueryHelper.getBigIntType() + ", "
+				+ "significance INTEGER NOT NULL, "
+				+ "priority INTEGER NOT NULL, "
+				+ "UNIQUE (obj_id),"
+				+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE "
+				+ ")");	
+		
+	}
 	private void createTagTables(Statement stmt) throws SQLException {
 		stmt.execute("CREATE TABLE tsk_tag_sets (tag_set_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, name TEXT UNIQUE)");
 		stmt.execute("CREATE TABLE tag_names (tag_name_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, display_name TEXT UNIQUE, "
@@ -332,6 +370,14 @@ private void addIndexes(Connection conn) throws TskCoreException {
 			stmt.execute("CREATE INDEX events_artifact_id ON tsk_event_descriptions(artifact_id)");
 			stmt.execute("CREATE INDEX events_sub_type_time ON tsk_events(event_type_id,  time)");
 			stmt.execute("CREATE INDEX events_time ON tsk_events(time)");
+			
+			// analysis results and scores indices
+			stmt.execute("CREATE INDEX score_significance_priority ON tsk_aggregate_score(significance, priority)");
+			stmt.execute("CREATE INDEX score_datasource_obj_id ON tsk_aggregate_score(data_source_obj_id)");
+			
+			stmt.execute("CREATE INDEX tsk_file_attributes_obj_id ON tsk_file_attributes(obj_id)");
+			
+			
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error initializing db_info tables", ex);
 		}
@@ -359,15 +405,71 @@ private void createIngestTables(Statement stmt) throws SQLException {
 				+ "FOREIGN KEY(ingest_module_id) REFERENCES ingest_modules(ingest_module_id) ON DELETE CASCADE);");
 	}
 	
+	private void createHostTables(Statement stmt) throws SQLException {
+
+		stmt.execute("CREATE TABLE tsk_persons (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "name TEXT NOT NULL, " // person name
+				+ "UNIQUE(name)) ");
+		
+		// References tsk_persons
+		stmt.execute("CREATE TABLE tsk_hosts (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "name TEXT NOT NULL, " // host name
+				+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
+				+ "person_id INTEGER, "
+				+ "merged_into " + dbQueryHelper.getBigIntType() + ", "
+				+ "FOREIGN KEY(person_id) REFERENCES tsk_persons(id) ON DELETE SET NULL, "
+				+ "FOREIGN KEY(merged_into) REFERENCES tsk_hosts(id) ON DELETE CASCADE, "
+				+ "UNIQUE(name)) ");
+
+		stmt.execute("CREATE TABLE  tsk_host_addresses (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "address_type INTEGER NOT NULL, "
+				+ "address TEXT NOT NULL, "
+				+ "UNIQUE(address_type, address)) ");
+
+		stmt.execute("CREATE TABLE tsk_host_address_map  (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "host_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "addr_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "source_obj_id " + dbQueryHelper.getBigIntType() + ", " // object id of the source where this mapping was found.
+				+ "time " + dbQueryHelper.getBigIntType() + ", " // time at which the mapping existed
+				+ "UNIQUE(host_id, addr_obj_id, time), "
+				+ "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id), "
+				+ "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL )");
+
+		// stores associations between DNS name and IP address
+		stmt.execute("CREATE TABLE tsk_host_address_dns_ip_map (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "dns_address_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "ip_address_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "source_obj_id " + dbQueryHelper.getBigIntType() + ", "
+				+ "time " + dbQueryHelper.getBigIntType() + ", " // time at which the mapping existed
+				+ "UNIQUE(dns_address_id, ip_address_id, time), "
+				+ "FOREIGN KEY(dns_address_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(ip_address_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE,"
+				+ "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL )");
+
+		// maps an address to a content/item using it
+		stmt.execute("CREATE TABLE  tsk_host_address_usage (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "addr_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "	// obj id of the content/item using the address
+				+ "data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " // data source where the usage was found
+				+ "UNIQUE(addr_obj_id, obj_id), "
+				+ "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE )");		
+	}
+		
+	// Must be called after tsk_persons, tsk_hosts and tsk_objects have been created.
 	private void createAccountTables(Statement stmt) throws SQLException {
 		stmt.execute("CREATE TABLE account_types (account_type_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
 				+ "type_name TEXT UNIQUE NOT NULL, display_name TEXT NOT NULL)");
 
+		// References account_types
 		stmt.execute("CREATE TABLE accounts (account_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
 				+ "account_type_id INTEGER NOT NULL, account_unique_identifier TEXT NOT NULL, "
 				+ "UNIQUE(account_type_id, account_unique_identifier), "
 				+ "FOREIGN KEY(account_type_id) REFERENCES account_types(account_type_id))");
 
+		// References accounts, tsk_objects
 		stmt.execute("CREATE TABLE account_relationships (relationship_id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
 				+ "account1_id INTEGER NOT NULL, account2_id INTEGER NOT NULL, "
 				+ "relationship_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
@@ -378,6 +480,72 @@ private void createAccountTables(Statement stmt) throws SQLException {
 				+ "FOREIGN KEY(account2_id) REFERENCES accounts(account_id), "
 				+ "FOREIGN KEY(relationship_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
 				+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE)");
+		
+		// References tsk_hosts
+		stmt.execute("CREATE TABLE tsk_os_account_realms (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "realm_name TEXT DEFAULT NULL, "	// realm name - for a domain realm, may be null
+				+ "realm_addr TEXT DEFAULT NULL, "		// a SID/UID or some other identifier, may be null
+				+ "realm_signature TEXT NOT NULL, "	// Signature exists only to prevent duplicates. It is made up of realm address/name and scope host
+				+ "scope_host_id " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, " // if the realm scope is a single host
+				+ "scope_confidence INTEGER, "	// indicates whether we know for sure the realm scope or if we are inferring it				
+				+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
+				+ "merged_into " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, "	
+				+ "UNIQUE(realm_signature), "
+				+ "FOREIGN KEY(scope_host_id) REFERENCES tsk_hosts(id) ON DELETE CASCADE,"
+				+ "FOREIGN KEY(merged_into) REFERENCES tsk_os_account_realms(id) ON DELETE CASCADE )");
+		
+		// References tsk_objects, tsk_os_account_realms, tsk_persons
+		stmt.execute("CREATE TABLE tsk_os_accounts (os_account_obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, "
+				+ "login_name TEXT DEFAULT NULL, "	// login name, if available, may be null
+				+ "full_name TEXT DEFAULT NULL, "	// full name, if available, may be null
+				+ "realm_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "		// realm for the account 
+				+ "addr TEXT DEFAULT NULL, "	// SID/UID, if available
+				+ "signature TEXT NOT NULL, "	// This exists only to prevent duplicates. It is either the addr or the login_name, whichever is not null.
+				+ "status INTEGER, "    // enabled/disabled/deleted
+				+ "type INTEGER, "	// service/interactive
+				+ "created_date " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, "
+				+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
+			    + "merged_into " + dbQueryHelper.getBigIntType() + " DEFAULT NULL, "
+				+ "UNIQUE(signature, realm_id), "
+				+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(realm_id) REFERENCES tsk_os_account_realms(id) ON DELETE CASCADE,"
+				+ "FOREIGN KEY(merged_into) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE CASCADE )");
+		
+	}
+	// Must be called after createAccountTables() and after the blackboard_attribute_types and blackboard_artifacts tables have been created.
+	private void createAccountInstancesAndArtifacts(Statement stmt) throws SQLException {
+		
+		// References tsk_os_accounts, tsk_hosts, tsk_objects, blackboard_attribute_types
+		stmt.execute("CREATE TABLE tsk_os_account_attributes (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "os_account_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "host_id " + dbQueryHelper.getBigIntType() + ", " 
+				+ "source_obj_id " + dbQueryHelper.getBigIntType() + ", " 	
+				+ "attribute_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "value_type INTEGER NOT NULL, "
+				+ "value_byte " + dbQueryHelper.getBlobType() + ", "
+				+ "value_text TEXT, "
+				+ "value_int32 INTEGER, value_int64 " + dbQueryHelper.getBigIntType() + ", "
+				+ "value_double NUMERIC(20, 10), "
+				+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE CASCADE, " 
+				+ "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL, "		
+				+ "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))");	
+		
+		// References tsk_os_accounts, tsk_objects, tsk_hosts
+		stmt.execute("CREATE TABLE tsk_os_account_instances (id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "os_account_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "data_source_obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, " 
+				+ "instance_type INTEGER NOT NULL, "	// PerformedActionOn/ReferencedOn
+				+ "UNIQUE(os_account_obj_id, data_source_obj_id), "
+				+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE CASCADE, " 
+				+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE ) ");
+		
+		// References blackboard_artifacts, tsk_os_accounts
+		stmt.execute("CREATE TABLE tsk_data_artifacts ( "
+				+ "artifact_obj_id " + dbQueryHelper.getBigIntType() + " PRIMARY KEY, "
+				+ "os_account_obj_id " + dbQueryHelper.getBigIntType() + ", "
+				+ "FOREIGN KEY(artifact_obj_id) REFERENCES blackboard_artifacts(artifact_obj_id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id) ON DELETE SET NULL) ");	
 	}
 	
 	private void createEventTables(Statement stmt) throws SQLException {
@@ -386,14 +554,6 @@ private void createEventTables(Statement stmt) throws SQLException {
 				+ " display_name TEXT UNIQUE NOT NULL , "
 				+ " super_type_id INTEGER REFERENCES tsk_event_types(event_type_id) )");
 
-		stmt.execute("INSERT INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES(0, 'Event Types', null)");
-		stmt.execute("INSERT INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES(1, 'File System', 0)");
-		stmt.execute("INSERT INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES(2, 'Web Activity', 0)");
-		stmt.execute("INSERT INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES(3, 'Misc Types', 0)");
-		stmt.execute("INSERT INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES(4, 'Modified', 1)");
-		stmt.execute("INSERT INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES(5, 'Accessed', 1)");
-		stmt.execute("INSERT INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES(6, 'Created', 1)");
-		stmt.execute("INSERT INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES(7, 'Changed', 1)");
 		/*
 		* Regarding the timeline event tables schema, note that several columns
 		* in the tsk_event_descriptions table seem, at first glance, to be
@@ -430,6 +590,21 @@ private void createEventTables(Statement stmt) throws SQLException {
 			+ " time " + dbQueryHelper.getBigIntType() + " NOT NULL , "
 			+ " UNIQUE (event_type_id, event_description_id, time))");			
 	}
+
+	private void createAttributeTables(Statement stmt) throws SQLException {
+		/*
+		 * Binary representation of BYTEA is a bunch of bytes, which could
+		 * include embedded nulls so we have to pay attention to field length.
+		 * http://www.postgresql.org/docs/9.4/static/libpq-example.html
+		 */
+		stmt.execute("CREATE TABLE tsk_file_attributes ( id " + dbQueryHelper.getPrimaryKey() + " PRIMARY KEY, "
+				+ "obj_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "attribute_type_id " + dbQueryHelper.getBigIntType() + " NOT NULL, "
+				+ "value_type INTEGER NOT NULL, value_byte " + dbQueryHelper.getBlobType() + ", "
+				+ "value_text TEXT, value_int32 INTEGER, value_int64 " + dbQueryHelper.getBigIntType() + ", value_double NUMERIC(20, 10), "
+				+ "FOREIGN KEY(obj_id) REFERENCES tsk_files(obj_id) ON DELETE CASCADE, "
+				+ "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))");
+	}
 	
 	/**
 	 * Helper class for holding code unique to each database type.
@@ -529,6 +704,8 @@ Connection getConnection(String databaseName) throws TskCoreException {
 			StringBuilder url = new StringBuilder();
 			url.append(JDBC_BASE_URI)
 				.append(info.getHost())
+				.append(":")
+				.append(info.getPort())
 				.append('/') // NON-NLS
 				.append(encodedDbName);
 			
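
[Editorial note, not part of the patch] The tables created above give each file an optional owner via tsk_files.os_account_obj_id, each OS account a realm via tsk_os_accounts.realm_id, and each realm an optional scope host via tsk_os_account_realms.scope_host_id. The following is a minimal read-only sketch of how those foreign keys chain together; it assumes an already-open java.sql.Connection named conn to a case database and is illustrative only, not code introduced by this change.

	// Illustrative sketch: walk file -> OS account -> realm -> host using the
	// foreign keys defined in createFileTables(), createAccountTables() and createHostTables().
	String ownerQuery =
			  "SELECT f.obj_id, f.owner_uid, a.login_name, a.addr, r.realm_name, h.name AS host_name "
			+ "FROM tsk_files f "
			+ "JOIN tsk_os_accounts a ON f.os_account_obj_id = a.os_account_obj_id "
			+ "JOIN tsk_os_account_realms r ON a.realm_id = r.id "
			+ "LEFT JOIN tsk_hosts h ON r.scope_host_id = h.id "
			+ "WHERE f.os_account_obj_id IS NOT NULL";
	try (java.sql.Statement s = conn.createStatement();        // conn: assumed open connection
			java.sql.ResultSet rs = s.executeQuery(ownerQuery)) {
		while (rs.next()) {
			System.out.printf("file %d owned by %s@%s (host: %s)%n",
					rs.getLong("obj_id"), rs.getString("login_name"),
					rs.getString("realm_name"), rs.getString("host_name"));
		}
	}
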
diff --git a/bindings/java/src/org/sleuthkit/datamodel/CaseDbAccessManager.java b/bindings/java/src/org/sleuthkit/datamodel/CaseDbAccessManager.java
index 1bf72c77351b042721c2fa4cef32807d6d4352d6..2736688a0455ef79eeced90359c2ac277ec37b2b 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/CaseDbAccessManager.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/CaseDbAccessManager.java
@@ -248,22 +248,16 @@ public void createTable(final String tableName, final String tableSchema) throws
 		validateTableName(tableName);
 		validateSQL(tableSchema);
 
-		CaseDbConnection connection = tskDB.getConnection();
 		tskDB.acquireSingleUserCaseWriteLock();
-
-		Statement statement = null;
 		String createSQL = "CREATE TABLE IF NOT EXISTS " + tableName + " " + tableSchema;
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = tskDB.getConnection();
+				Statement statement = connection.createStatement();) {
 			statement.execute(createSQL);
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error creating table " + tableName, ex);
 		} finally {
-			closeStatement(statement);
-			connection.close();
 			tskDB.releaseSingleUserCaseWriteLock();
 		}
-
 	}
 
 	/**
@@ -350,19 +344,14 @@ public void createIndex(final String indexName, final String tableName, final St
 		validateIndexName(indexName);
 		validateSQL(colsSQL);
 
-		CaseDbConnection connection = tskDB.getConnection();
 		tskDB.acquireSingleUserCaseWriteLock();
-
-		Statement statement = null;
 		String indexSQL = "CREATE INDEX IF NOT EXISTS " + indexName + " ON " + tableName + " " + colsSQL; // NON-NLS
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = tskDB.getConnection();
+			Statement statement = connection.createStatement(); ) {
 			statement.execute(indexSQL);
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error creating index " + tableName, ex);
 		} finally {
-			closeStatement(statement);
-			connection.close();
 			tskDB.releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -600,21 +589,15 @@ public void select(final String sql, final CaseDbAccessQueryCallback queryCallba
 		
 		validateSQL(sql);
 		
-		CaseDbConnection connection = tskDB.getConnection();
 		tskDB.acquireSingleUserCaseReadLock();
-
-		Statement statement = null;
-		ResultSet resultSet;
 		String selectSQL = "SELECT " +  sql; // NON-NLS
-		try {
-			statement = connection.createStatement();
-			resultSet = statement.executeQuery(selectSQL);
+		try (CaseDbConnection connection = tskDB.getConnection();
+			Statement statement = connection.createStatement();
+			ResultSet resultSet = statement.executeQuery(selectSQL)) {
 			queryCallback.process(resultSet);
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error running SELECT query.", ex);
 		} finally {
-			closeStatement(statement);
-			connection.close();
 			tskDB.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -631,19 +614,14 @@ public void delete(final String tableName, final String sql ) throws TskCoreExce
 		validateTableName(tableName);
 		validateSQL(sql);
 
-		CaseDbConnection connection = tskDB.getConnection();
 		tskDB.acquireSingleUserCaseWriteLock();
-
-		Statement statement = null;
 		String deleteSQL = "DELETE FROM " + tableName + " " + sql; // NON-NLS
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = tskDB.getConnection();
+			Statement statement = connection.createStatement();) {
 			statement.executeUpdate(deleteSQL);
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error deleting row from table " + tableName, ex);
 		} finally {
-			closeStatement(statement);
-			connection.close();
 			tskDB.releaseSingleUserCaseWriteLock();
 		}
 	}
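
[Editorial note, not part of the patch] The CaseDbAccessManager changes above, and the CommunicationsManager changes that follow, converge on the same resource-handling shape: acquire the single-user case lock first, open the CaseDbConnection, Statement and ResultSet in a try-with-resources so they are closed on every path, and release the lock in the finally block. A minimal sketch of that shape, where runQuery, db and handleRow are hypothetical names used only for illustration:

	private void runQuery(String sql) throws TskCoreException {
		db.acquireSingleUserCaseReadLock();                 // lock before opening the connection
		try (CaseDbConnection connection = db.getConnection();
				Statement statement = connection.createStatement();
				ResultSet resultSet = connection.executeQuery(statement, sql)) {
			while (resultSet.next()) {
				handleRow(resultSet);                       // hypothetical per-row handler
			}
		} catch (SQLException ex) {
			throw new TskCoreException("Error running query", ex);
		} finally {
			db.releaseSingleUserCaseReadLock();             // lock is released even if the query fails
		}
	}
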
diff --git a/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java b/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java
index 5b1aaa05fd99b552781a4e35e92474933ba5cf7d..a5ff593837cc7fa8ce7b8166cef788c4786a70ab 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2017-2020 Basis Technology Corp.
+ * Copyright 2017-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -24,7 +24,6 @@
 import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -37,6 +36,7 @@
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
 import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
+import static org.sleuthkit.datamodel.SleuthkitCase.closeConnection;
 import static org.sleuthkit.datamodel.SleuthkitCase.closeResultSet;
 import static org.sleuthkit.datamodel.SleuthkitCase.closeStatement;
 
@@ -47,7 +47,7 @@
 public final class CommunicationsManager {
 
 	private static final Logger LOGGER = Logger.getLogger(CommunicationsManager.class.getName());
-
+	private static final BlackboardArtifact.Type ACCOUNT_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT);
 	private final SleuthkitCase db;
 
 	private final Map<Account.Type, Integer> accountTypeToTypeIdMap
@@ -84,13 +84,9 @@ public final class CommunicationsManager {
 	 *                          account types from the db.
 	 */
 	private void initAccountTypes() throws TskCoreException {
-		CaseDbConnection connection = db.getConnection();
 		db.acquireSingleUserCaseWriteLock();
-		Statement statement = null;
-		ResultSet resultSet = null;
-
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = db.getConnection();
+			Statement statement = connection.createStatement();) {
 			// Read the table
 			int count = readAccountTypes();
 			if (0 == count) {
@@ -99,30 +95,27 @@ private void initAccountTypes() throws TskCoreException {
 					try {
 						statement.execute("INSERT INTO account_types (type_name, display_name) VALUES ( '" + type.getTypeName() + "', '" + type.getDisplayName() + "')"); //NON-NLS
 					} catch (SQLException ex) {
-						resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM account_types WHERE type_name = '" + type.getTypeName() + "'"); //NON-NLS
-						resultSet.next();
-						if (resultSet.getLong("count") == 0) {
-							throw ex;
+						try (ResultSet resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM account_types WHERE type_name = '" + type.getTypeName() + "'")) { //NON-NLS
+							resultSet.next();
+							if (resultSet.getLong("count") == 0) {
+								throw ex;
+							}
 						}
-						resultSet.close();
 					}
 
-					ResultSet rs2 = connection.executeQuery(statement, "SELECT account_type_id FROM account_types WHERE type_name = '" + type.getTypeName() + "'"); //NON-NLS
-					rs2.next();
-					int typeID = rs2.getInt("account_type_id");
-					rs2.close();
+					try (ResultSet rs2 = connection.executeQuery(statement, "SELECT account_type_id FROM account_types WHERE type_name = '" + type.getTypeName() + "'")) { //NON-NLS
+						rs2.next();
+						int typeID = rs2.getInt("account_type_id");
 
-					Account.Type accountType = new Account.Type(type.getTypeName(), type.getDisplayName());
-					this.accountTypeToTypeIdMap.put(accountType, typeID);
-					this.typeNameToAccountTypeMap.put(type.getTypeName(), accountType);
+						Account.Type accountType = new Account.Type(type.getTypeName(), type.getDisplayName());
+						this.accountTypeToTypeIdMap.put(accountType, typeID);
+						this.typeNameToAccountTypeMap.put(type.getTypeName(), accountType);
+					}
 				}
 			}
 		} catch (SQLException ex) {
 			LOGGER.log(Level.SEVERE, "Failed to add row to account_types", ex);
 		} finally {
-			closeResultSet(resultSet);
-			closeStatement(statement);
-			connection.close();
 			db.releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -136,13 +129,14 @@ private void initAccountTypes() throws TskCoreException {
 	 * @throws TskCoreException if there is a problem reading the account types.
 	 */
 	private int readAccountTypes() throws TskCoreException {
-		CaseDbConnection connection = db.getConnection();
-		db.acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement statement = null;
 		ResultSet resultSet = null;
 		int count = 0;
-
+		
+		db.acquireSingleUserCaseReadLock();
 		try {
+			connection = db.getConnection();
 			statement = connection.createStatement();
 
 			// If the account_types table is already populated, say when opening a case,  then load it
@@ -165,7 +159,7 @@ private int readAccountTypes() throws TskCoreException {
 		} finally {
 			closeResultSet(resultSet);
 			closeStatement(statement);
-			connection.close();
+			closeConnection(connection);
 			db.releaseSingleUserCaseReadLock();
 		}
 
@@ -202,21 +196,19 @@ public org.sleuthkit.datamodel.Account.Type addAccountType(String accountTypeNam
 			return accountType;
 		}
 
-		CaseDbConnection connection = db.getConnection();
-		db.acquireSingleUserCaseWriteLock();
+		CaseDbTransaction trans = db.beginTransaction();
 		Statement s = null;
 		ResultSet rs = null;
 		try {
-			connection.beginTransaction();
-			s = connection.createStatement();
-			rs = connection.executeQuery(s, "SELECT * FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
+			s = trans.getConnection().createStatement();
+			rs = trans.getConnection().executeQuery(s, "SELECT * FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
 			if (!rs.next()) {
 				rs.close();
 
 				s.execute("INSERT INTO account_types (type_name, display_name) VALUES ( '" + accountTypeName + "', '" + displayName + "')"); //NON-NLS
 
 				// Read back the typeID
-				rs = connection.executeQuery(s, "SELECT * FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
+				rs = trans.getConnection().executeQuery(s, "SELECT * FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
 				rs.next();
 
 				int typeID = rs.getInt("account_type_id");
@@ -225,7 +217,7 @@ public org.sleuthkit.datamodel.Account.Type addAccountType(String accountTypeNam
 				this.accountTypeToTypeIdMap.put(accountType, typeID);
 				this.typeNameToAccountTypeMap.put(accountTypeName, accountType);
 
-				connection.commitTransaction();
+				trans.commit();
 
 				return accountType;
 			} else {
@@ -237,13 +229,11 @@ public org.sleuthkit.datamodel.Account.Type addAccountType(String accountTypeNam
 				return accountType;
 			}
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			trans.rollback();
 			throw new TskCoreException("Error adding account type", ex);
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
-			db.releaseSingleUserCaseWriteLock();
 		}
 	}
 
@@ -304,14 +294,11 @@ public AccountFileInstance createAccountFileInstance(org.sleuthkit.datamodel.Acc
 	// NOTE: Full name given for Type for doxygen linking
 	public Account getAccount(org.sleuthkit.datamodel.Account.Type accountType, String accountUniqueID) throws TskCoreException, InvalidAccountIDException {
 		Account account = null;
-		CaseDbConnection connection = db.getConnection();
 		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
-		try {
-			s = connection.createStatement();
-			rs = connection.executeQuery(s, "SELECT * FROM accounts WHERE account_type_id = " + getAccountTypeId(accountType)
-					+ " AND account_unique_identifier = '" + normalizeAccountID(accountType, accountUniqueID) + "'"); //NON-NLS
+		try (CaseDbConnection connection = db.getConnection();
+			Statement s = connection.createStatement();
+			ResultSet rs = connection.executeQuery(s, "SELECT * FROM accounts WHERE account_type_id = " + getAccountTypeId(accountType)
+					+ " AND account_unique_identifier = '" + normalizeAccountID(accountType, accountUniqueID) + "'");) { //NON-NLS
 
 			if (rs.next()) {
 				account = new Account(rs.getInt("account_id"), accountType,
@@ -320,9 +307,6 @@ public Account getAccount(org.sleuthkit.datamodel.Account.Type accountType, Stri
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting account type id", ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 
@@ -355,6 +339,10 @@ public Account getAccount(org.sleuthkit.datamodel.Account.Type accountType, Stri
 	public void addRelationships(AccountFileInstance sender, List<AccountFileInstance> recipients,
 			BlackboardArtifact sourceArtifact, org.sleuthkit.datamodel.Relationship.Type relationshipType, long dateTime) throws TskCoreException, TskDataException {
 
+		if (sourceArtifact.getDataSourceObjectID() == null) {
+			throw new TskDataException("Source Artifact does not have a valid data source.");
+		}
+
 		if (relationshipType.isCreatableFrom(sourceArtifact) == false) {
 			throw new TskDataException("Can not make a " + relationshipType.getDisplayName()
 					+ " relationship from a" + sourceArtifact.getDisplayName());
@@ -370,7 +358,7 @@ public void addRelationships(AccountFileInstance sender, List<AccountFileInstanc
 
 		if (null != sender) {
 			accountIDs.add(sender.getAccount().getAccountID());
-			if (sender.getDataSourceObjectID() != sourceArtifact.getDataSourceObjectID()) {
+			if (!sender.getDataSourceObjectID().equals(sourceArtifact.getDataSourceObjectID())) {
 				throw new TskDataException("Sender and relationship are from different data sources :"
 						+ "Sender source ID" + sender.getDataSourceObjectID() + " != relationship source ID" + sourceArtifact.getDataSourceObjectID());
 			}
@@ -378,15 +366,15 @@ public void addRelationships(AccountFileInstance sender, List<AccountFileInstanc
 
 		for (AccountFileInstance recipient : recipients) {
 			accountIDs.add(recipient.getAccount().getAccountID());
-			if (recipient.getDataSourceObjectID() != sourceArtifact.getDataSourceObjectID()) {
+			if (!recipient.getDataSourceObjectID().equals(sourceArtifact.getDataSourceObjectID())) {
 				throw new TskDataException("Recipient and relationship are from different data sources :"
 						+ "Recipient source ID" + recipient.getDataSourceObjectID() + " != relationship source ID" + sourceArtifact.getDataSourceObjectID());
 			}
 		}
-		
+
 		// Set up the query for the prepared statement
 		String query = "INTO account_relationships (account1_id, account2_id, relationship_source_obj_id, date_time, relationship_type, data_source_obj_id  ) "
-						+ "VALUES (?,?,?,?,?,?)";
+				+ "VALUES (?,?,?,?,?,?)";
 		switch (db.getDatabaseType()) {
 			case POSTGRESQL:
 				query = "INSERT " + query + " ON CONFLICT DO NOTHING";
@@ -396,13 +384,13 @@ public void addRelationships(AccountFileInstance sender, List<AccountFileInstanc
 				break;
 			default:
 				throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name());
-		}		
-		
-		CaseDbTransaction trans = db.beginTransaction();	
+		}
+
+		CaseDbTransaction trans = db.beginTransaction();
 		try {
 			SleuthkitCase.CaseDbConnection connection = trans.getConnection();
 			PreparedStatement preparedStatement = connection.getPreparedStatement(query, Statement.NO_GENERATED_KEYS);
-			
+
 			for (int i = 0; i < accountIDs.size(); i++) {
 				for (int j = i + 1; j < accountIDs.size(); j++) {
 					long account1_id = accountIDs.get(i);
@@ -461,26 +449,22 @@ private Account getOrCreateAccount(Account.Type accountType, String accountUniqu
 					throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name());
 			}
 
-			CaseDbConnection connection = db.getConnection();
-			db.acquireSingleUserCaseWriteLock();
+			CaseDbTransaction trans = db.beginTransaction();
 			Statement s = null;
 			ResultSet rs = null;
 			try {
-				connection.beginTransaction();
-				s = connection.createStatement();
+				s = trans.getConnection().createStatement();
 
 				s.execute(query);
 
-				connection.commitTransaction();
+				trans.commit();
 				account = getAccount(accountType, accountUniqueID);
 			} catch (SQLException ex) {
-				connection.rollbackTransaction();
+				trans.rollback();
 				throw new TskCoreException("Error adding an account", ex);
 			} finally {
 				closeResultSet(rs);
 				closeStatement(s);
-				connection.close();
-				db.releaseSingleUserCaseWriteLock();
 			}
 		}
 
@@ -505,13 +489,19 @@ private Account getOrCreateAccount(Account.Type accountType, String accountUniqu
 	 *                          case database.
 	 */
 	private BlackboardArtifact getOrCreateAccountFileInstanceArtifact(Account.Type accountType, String accountUniqueID, String moduleName, Content sourceFile) throws TskCoreException {
+		if (sourceFile == null) {
+			throw new TskCoreException("Source file not provided.");
+		}
+
 		BlackboardArtifact accountArtifact = getAccountFileInstanceArtifact(accountType, accountUniqueID, sourceFile);
 		if (accountArtifact == null) {
-			accountArtifact = db.newBlackboardArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT, sourceFile.getId());
-			Collection<BlackboardAttribute> attributes = new ArrayList<>();
-			attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE, moduleName, accountType.getTypeName()));
-			attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID, moduleName, accountUniqueID));
-			accountArtifact.addAttributes(attributes);
+			List<BlackboardAttribute> attributes = Arrays.asList(
+					new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE, moduleName, accountType.getTypeName()),
+					new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID, moduleName, accountUniqueID)
+			);
+
+			accountArtifact = sourceFile.newDataArtifact(ACCOUNT_TYPE, attributes);
+			
 			try {
 				db.getBlackboard().postArtifact(accountArtifact, moduleName);
 			} catch (BlackboardException ex) {
@@ -536,45 +526,40 @@ private BlackboardArtifact getOrCreateAccountFileInstanceArtifact(Account.Type a
 	 */
 	private BlackboardArtifact getAccountFileInstanceArtifact(Account.Type accountType, String accountUniqueID, Content sourceFile) throws TskCoreException {
 		BlackboardArtifact accountArtifact = null;
-		CaseDbConnection connection = db.getConnection();
+		
+		String queryStr = "SELECT artifacts.artifact_id AS artifact_id,"
+			+ " artifacts.obj_id AS obj_id,"
+			+ " artifacts.artifact_obj_id AS artifact_obj_id,"
+			+ " artifacts.data_source_obj_id AS data_source_obj_id,"
+			+ " artifacts.artifact_type_id AS artifact_type_id,"
+			+ " artifacts.review_status_id AS review_status_id"
+			+ " FROM blackboard_artifacts AS artifacts"
+			+ "	JOIN blackboard_attributes AS attr_account_type"
+			+ "		ON artifacts.artifact_id = attr_account_type.artifact_id"
+			+ " JOIN blackboard_attributes AS attr_account_id"
+			+ "		ON artifacts.artifact_id = attr_account_id.artifact_id"
+			+ "		AND attr_account_id.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID.getTypeID()
+			+ "	    AND attr_account_id.value_text = '" + accountUniqueID + "'"
+			+ " WHERE artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID()
+			+ " AND attr_account_type.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE.getTypeID()
+			+ " AND attr_account_type.value_text = '" + accountType.getTypeName() + "'"
+			+ " AND artifacts.obj_id = " + sourceFile.getId(); //NON-NLS
+		
 		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
-
-		try {
-			s = connection.createStatement();
-			String queryStr = "SELECT artifacts.artifact_id AS artifact_id,"
-					+ " artifacts.obj_id AS obj_id,"
-					+ " artifacts.artifact_obj_id AS artifact_obj_id,"
-					+ " artifacts.data_source_obj_id AS data_source_obj_id,"
-					+ " artifacts.artifact_type_id AS artifact_type_id,"
-					+ " artifacts.review_status_id AS review_status_id"
-					+ " FROM blackboard_artifacts AS artifacts"
-					+ "	JOIN blackboard_attributes AS attr_account_type"
-					+ "		ON artifacts.artifact_id = attr_account_type.artifact_id"
-					+ " JOIN blackboard_attributes AS attr_account_id"
-					+ "		ON artifacts.artifact_id = attr_account_id.artifact_id"
-					+ "		AND attr_account_id.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ID.getTypeID()
-					+ "	    AND attr_account_id.value_text = '" + accountUniqueID + "'"
-					+ " WHERE artifacts.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_ACCOUNT.getTypeID()
-					+ " AND attr_account_type.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE.getTypeID()
-					+ " AND attr_account_type.value_text = '" + accountType.getTypeName() + "'"
-					+ " AND artifacts.obj_id = " + sourceFile.getId(); //NON-NLS
-
-			rs = connection.executeQuery(s, queryStr); //NON-NLS
+		try (CaseDbConnection connection = db.getConnection();
+			Statement s = connection.createStatement();
+			ResultSet rs = connection.executeQuery(s, queryStr);) { //NON-NLS
 			if (rs.next()) {
 				BlackboardArtifact.Type bbartType = db.getArtifactType(rs.getInt("artifact_type_id"));
 
-				accountArtifact = new BlackboardArtifact(db, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				accountArtifact = new BlackboardArtifact(db, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						bbartType.getTypeID(), bbartType.getTypeName(), bbartType.getDisplayName(),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id")));
 			}
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting account", ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 
@@ -596,14 +581,10 @@ public org.sleuthkit.datamodel.Account.Type getAccountType(String accountTypeNam
 			return this.typeNameToAccountTypeMap.get(accountTypeName);
 		}
 
-		CaseDbConnection connection = db.getConnection();
 		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
-
-		try {
-			s = connection.createStatement();
-			rs = connection.executeQuery(s, "SELECT account_type_id, type_name, display_name FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
+		try (CaseDbConnection connection = db.getConnection();
+			Statement s = connection.createStatement();
+			ResultSet rs = connection.executeQuery(s, "SELECT account_type_id, type_name, display_name FROM account_types WHERE type_name = '" + accountTypeName + "'");) { //NON-NLS
 			Account.Type accountType = null;
 			if (rs.next()) {
 				accountType = new Account.Type(accountTypeName, rs.getString("display_name"));
@@ -614,9 +595,6 @@ public org.sleuthkit.datamodel.Account.Type getAccountType(String accountTypeNam
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting account type id", ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -636,80 +614,76 @@ public org.sleuthkit.datamodel.Account.Type getAccountType(String accountTypeNam
 	 *                          within TSK core
 	 */
 	public List<AccountDeviceInstance> getAccountDeviceInstancesWithRelationships(CommunicationsFilter filter) throws TskCoreException {
-		CaseDbConnection connection = db.getConnection();
-		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
 
-		try {
-			s = connection.createStatement();
-
-			//set up applicable filters 
-			Set<String> applicableInnerQueryFilters = new HashSet<String>(Arrays.asList(
-					CommunicationsFilter.DateRangeFilter.class.getName(),
-					CommunicationsFilter.DeviceFilter.class.getName(),
-					CommunicationsFilter.RelationshipTypeFilter.class.getName()
-			));
-			String relationshipFilterSQL = getCommunicationsFilterSQL(filter, applicableInnerQueryFilters);
-
-			String relationshipLimitSQL = getMostRecentFilterLimitSQL(filter);
-
-			String relTblfilterQuery
-					= "SELECT * "
-					+ "FROM account_relationships as relationships"
-					+ (relationshipFilterSQL.isEmpty() ? "" : " WHERE " + relationshipFilterSQL)
-					+ (relationshipLimitSQL.isEmpty() ? "" : relationshipLimitSQL);
-
-			String uniqueAccountQueryTemplate
-					= " SELECT %1$1s as account_id,"
-					+ " data_source_obj_id"
-					+ " FROM ( " + relTblfilterQuery + ")AS %2$s";
-
-			String relationshipTableFilterQuery1 = String.format(uniqueAccountQueryTemplate, "account1_id", "union_query_1");
-			String relationshipTableFilterQuery2 = String.format(uniqueAccountQueryTemplate, "account2_id", "union_query_2");
-
-			//this query groups by account_id and data_source_obj_id across both innerQueries
-			String uniqueAccountQuery
-					= "SELECT DISTINCT account_id, data_source_obj_id"
-					+ " FROM ( " + relationshipTableFilterQuery1 + " UNION " + relationshipTableFilterQuery2 + " ) AS inner_union"
-					+ " GROUP BY account_id, data_source_obj_id";
-
-			// set up applicable filters
-			Set<String> applicableFilters = new HashSet<String>(Arrays.asList(
-					CommunicationsFilter.AccountTypeFilter.class.getName()
-			));
-
-			String accountTypeFilterSQL = getCommunicationsFilterSQL(filter, applicableFilters);
-
-			String queryStr
-					= //account info
-					" accounts.account_id AS account_id,"
-					+ " accounts.account_unique_identifier AS account_unique_identifier,"
-					//account type info
-					+ " account_types.type_name AS type_name,"
-					//Account device instance info
-					+ " data_source_info.device_id AS device_id"
-					+ " FROM ( " + uniqueAccountQuery + " ) AS account_device_instances"
-					+ " JOIN accounts AS accounts"
-					+ "		ON accounts.account_id = account_device_instances.account_id"
-					+ " JOIN account_types AS account_types"
-					+ "		ON accounts.account_type_id = account_types.account_type_id"
-					+ " JOIN data_source_info AS data_source_info"
-					+ "		ON account_device_instances.data_source_obj_id = data_source_info.obj_id"
-					+ (accountTypeFilterSQL.isEmpty() ? "" : " WHERE " + accountTypeFilterSQL);
+		//set up applicable filters 
+		Set<String> applicableInnerQueryFilters = new HashSet<String>(Arrays.asList(
+				CommunicationsFilter.DateRangeFilter.class.getName(),
+				CommunicationsFilter.DeviceFilter.class.getName(),
+				CommunicationsFilter.RelationshipTypeFilter.class.getName()
+		));
+		String relationshipFilterSQL = getCommunicationsFilterSQL(filter, applicableInnerQueryFilters);
 
-			switch (db.getDatabaseType()) {
-				case POSTGRESQL:
-					queryStr = "SELECT DISTINCT ON ( accounts.account_id, data_source_info.device_id) " + queryStr;
-					break;
-				case SQLITE:
-					queryStr = "SELECT " + queryStr + " GROUP BY accounts.account_id, data_source_info.device_id";
-					break;
-				default:
-					throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name());
-			}
+		String relationshipLimitSQL = getMostRecentFilterLimitSQL(filter);
+
+		String relTblfilterQuery
+				= "SELECT * "
+				+ "FROM account_relationships as relationships"
+				+ (relationshipFilterSQL.isEmpty() ? "" : " WHERE " + relationshipFilterSQL)
+				+ (relationshipLimitSQL.isEmpty() ? "" : relationshipLimitSQL);
+
+		String uniqueAccountQueryTemplate
+				= " SELECT %1$1s as account_id,"
+				+ " data_source_obj_id"
+				+ " FROM ( " + relTblfilterQuery + ")AS %2$s";
 
-			rs = connection.executeQuery(s, queryStr); //NON-NLS
+		String relationshipTableFilterQuery1 = String.format(uniqueAccountQueryTemplate, "account1_id", "union_query_1");
+		String relationshipTableFilterQuery2 = String.format(uniqueAccountQueryTemplate, "account2_id", "union_query_2");
+
+		//this query groups by account_id and data_source_obj_id across both innerQueries
+		String uniqueAccountQuery
+				= "SELECT DISTINCT account_id, data_source_obj_id"
+				+ " FROM ( " + relationshipTableFilterQuery1 + " UNION " + relationshipTableFilterQuery2 + " ) AS inner_union"
+				+ " GROUP BY account_id, data_source_obj_id";
+
+		// set up applicable filters
+		Set<String> applicableFilters = new HashSet<String>(Arrays.asList(
+				CommunicationsFilter.AccountTypeFilter.class.getName()
+		));
+
+		String accountTypeFilterSQL = getCommunicationsFilterSQL(filter, applicableFilters);
+
+		String queryStr
+				= //account info
+				" accounts.account_id AS account_id,"
+				+ " accounts.account_unique_identifier AS account_unique_identifier,"
+				//account type info
+				+ " account_types.type_name AS type_name,"
+				//Account device instance info
+				+ " data_source_info.device_id AS device_id"
+				+ " FROM ( " + uniqueAccountQuery + " ) AS account_device_instances"
+				+ " JOIN accounts AS accounts"
+				+ "		ON accounts.account_id = account_device_instances.account_id"
+				+ " JOIN account_types AS account_types"
+				+ "		ON accounts.account_type_id = account_types.account_type_id"
+				+ " JOIN data_source_info AS data_source_info"
+				+ "		ON account_device_instances.data_source_obj_id = data_source_info.obj_id"
+				+ (accountTypeFilterSQL.isEmpty() ? "" : " WHERE " + accountTypeFilterSQL);
+
+		switch (db.getDatabaseType()) {
+			case POSTGRESQL:
+				queryStr = "SELECT DISTINCT ON ( accounts.account_id, data_source_info.device_id) " + queryStr;
+				break;
+			case SQLITE:
+				queryStr = "SELECT " + queryStr + " GROUP BY accounts.account_id, data_source_info.device_id";
+				break;
+			default:
+				throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name());
+		}
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryStr);) { //NON-NLS
 			ArrayList<AccountDeviceInstance> accountDeviceInstances = new ArrayList<AccountDeviceInstance>();
 			while (rs.next()) {
 				long account_id = rs.getLong("account_id");
@@ -726,9 +700,6 @@ public List<AccountDeviceInstance> getAccountDeviceInstancesWithRelationships(Co
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting account device instances. " + ex.getMessage(), ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -808,16 +779,13 @@ public Map<AccountPair, Long> getRelationshipCountsPairwise(Set<AccountDeviceIns
 				+ "		accounts2.account_id, "
 				+ "		account_types2.type_name, "
 				+ "		account_types2.display_name";
-		CaseDbConnection connection = db.getConnection();
-		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
-
+		
 		Map<AccountPair, Long> results = new HashMap<AccountPair, Long>();
-
-		try {
-			s = connection.createStatement();
-			rs = connection.executeQuery(s, queryString); //NON-NLS
+		
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection();
+			Statement s = connection.createStatement();
+			ResultSet rs = connection.executeQuery(s, queryString);) { //NON-NLS
 
 			while (rs.next()) {
 				//make account 1
@@ -846,9 +814,6 @@ public Map<AccountPair, Long> getRelationshipCountsPairwise(Set<AccountDeviceIns
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting relationships between accounts. " + ex.getMessage(), ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -883,38 +848,30 @@ public long getRelationshipSourcesCount(AccountDeviceInstance accountDeviceInsta
 		));
 		String filterSQL = getCommunicationsFilterSQL(filter, applicableFilters);
 
-		CaseDbConnection connection = db.getConnection();
-		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
-
-		try {
-			s = connection.createStatement();
-
-			String innerQuery = " account_relationships AS relationships";
-			String limitStr = getMostRecentFilterLimitSQL(filter);
-
-			if (!limitStr.isEmpty()) {
-				innerQuery = "(SELECT * FROM account_relationships as relationships " + limitStr + ") as relationships";
-			}
+		String innerQuery = " account_relationships AS relationships";
+		String limitStr = getMostRecentFilterLimitSQL(filter);
 
-			String queryStr
-					= "SELECT count(DISTINCT relationships.relationship_source_obj_id) as count "
-					+ "	FROM" + innerQuery
-					+ " WHERE relationships.data_source_obj_id IN ( " + datasourceObjIdsCSV + " )"
-					+ " AND ( relationships.account1_id = " + account_id
-					+ "      OR  relationships.account2_id = " + account_id + " )"
-					+ (filterSQL.isEmpty() ? "" : " AND " + filterSQL);
+		if (!limitStr.isEmpty()) {
+			innerQuery = "(SELECT * FROM account_relationships as relationships " + limitStr + ") as relationships";
+		}
 
-			rs = connection.executeQuery(s, queryStr); //NON-NLS
+		String queryStr
+			= "SELECT count(DISTINCT relationships.relationship_source_obj_id) as count "
+			+ "	FROM" + innerQuery
+			+ " WHERE relationships.data_source_obj_id IN ( " + datasourceObjIdsCSV + " )"
+			+ " AND ( relationships.account1_id = " + account_id
+			+ "      OR  relationships.account2_id = " + account_id + " )"
+			+ (filterSQL.isEmpty() ? "" : " AND " + filterSQL);
+		
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryStr);) { //NON-NLS
 			rs.next();
 			return (rs.getLong("count"));
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting relationships count for account device instance. " + ex.getMessage(), ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -981,36 +938,34 @@ public Set<Content> getRelationshipSources(Set<AccountDeviceInstance> accountDev
 		if (!limitStr.isEmpty()) {
 			limitQuery = "(SELECT * FROM account_relationships as relationships " + limitStr + ") as relationships";
 		}
+		
+		String queryStr
+			= "SELECT DISTINCT artifacts.artifact_id AS artifact_id,"
+			+ " artifacts.obj_id AS obj_id,"
+			+ " artifacts.artifact_obj_id AS artifact_obj_id,"
+			+ " artifacts.data_source_obj_id AS data_source_obj_id, "
+			+ " artifacts.artifact_type_id AS artifact_type_id, "
+			+ " artifacts.review_status_id AS review_status_id  "
+			+ " FROM blackboard_artifacts as artifacts"
+			+ " JOIN " + limitQuery
+			+ "	ON artifacts.artifact_obj_id = relationships.relationship_source_obj_id"
+			// append sql to restrict search to specified account device instances 
+			+ " WHERE (" + adiSQLClause + " )"
+			// plus other filters
+			+ (filterSQL.isEmpty() ? "" : " AND (" + filterSQL + " )");
 
-		CaseDbConnection connection = db.getConnection();
+		
 		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
-
-		try {
-			s = connection.createStatement();
-			String queryStr
-					= "SELECT DISTINCT artifacts.artifact_id AS artifact_id,"
-					+ " artifacts.obj_id AS obj_id,"
-					+ " artifacts.artifact_obj_id AS artifact_obj_id,"
-					+ " artifacts.data_source_obj_id AS data_source_obj_id, "
-					+ " artifacts.artifact_type_id AS artifact_type_id, "
-					+ " artifacts.review_status_id AS review_status_id  "
-					+ " FROM blackboard_artifacts as artifacts"
-					+ " JOIN " + limitQuery
-					+ "	ON artifacts.artifact_obj_id = relationships.relationship_source_obj_id"
-					// append sql to restrict search to specified account device instances 
-					+ " WHERE (" + adiSQLClause + " )"
-					// plus other filters
-					+ (filterSQL.isEmpty() ? "" : " AND (" + filterSQL + " )");
-
-			rs = connection.executeQuery(s, queryStr); //NON-NLS
+		try (CaseDbConnection connection = db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryStr);) { //NON-NLS
 			Set<Content> relationshipSources = new HashSet<Content>();
 			while (rs.next()) {
 				BlackboardArtifact.Type bbartType = db.getArtifactType(rs.getInt("artifact_type_id"));
 				relationshipSources.add(new BlackboardArtifact(db, rs.getLong("artifact_id"),
 						rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
-						rs.getLong("data_source_obj_id"), bbartType.getTypeID(),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
+						bbartType.getTypeID(),
 						bbartType.getTypeName(), bbartType.getDisplayName(),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -1019,9 +974,6 @@ public Set<Content> getRelationshipSources(Set<AccountDeviceInstance> accountDev
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting relationships for account. " + ex.getMessage(), ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -1106,15 +1058,10 @@ public List<AccountDeviceInstance> getRelatedAccountDeviceInstances(AccountDevic
 				throw new TskCoreException("Unknown DB Type: " + db.getDatabaseType().name());
 		}
 
-		CaseDbConnection connection = db.getConnection();
 		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
-
-		try {
-			s = connection.createStatement();
-
-			rs = connection.executeQuery(s, queryStr); //NON-NLS
+		try (CaseDbConnection connection = db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryStr);) {
 			ArrayList<AccountDeviceInstance> accountDeviceInstances = new ArrayList<AccountDeviceInstance>();
 			while (rs.next()) {
 				long account_id = rs.getLong("account_id");
@@ -1131,9 +1078,6 @@ public List<AccountDeviceInstance> getRelatedAccountDeviceInstances(AccountDevic
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting account device instances. " + ex.getMessage(), ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -1184,18 +1128,17 @@ public List<Content> getRelationshipSources(AccountDeviceInstance account1, Acco
 				+ " ) OR (	  relationships.account2_id = " + account1.getAccount().getAccountID()
 				+ " AND relationships.account1_id =" + account2.getAccount().getAccountID() + " ))"
 				+ (filterSQL.isEmpty() ? "" : " AND " + filterSQL);
-		CaseDbConnection connection = db.getConnection();
+
 		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
-		try {
-			s = connection.createStatement();
-			rs = connection.executeQuery(s, queryString); //NON-NLS
+		try (CaseDbConnection connection = db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString);) {
 
 			ArrayList<Content> artifacts = new ArrayList<Content>();
 			while (rs.next()) {
 				BlackboardArtifact.Type bbartType = db.getArtifactType(rs.getInt("artifact_type_id"));
-				artifacts.add(new BlackboardArtifact(db, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(db, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						bbartType.getTypeID(), bbartType.getTypeName(), bbartType.getDisplayName(),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -1204,9 +1147,6 @@ public List<Content> getRelationshipSources(AccountDeviceInstance account1, Acco
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting relationships between accounts. " + ex.getMessage(), ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -1248,17 +1188,15 @@ public List<AccountFileInstance> getAccountFileInstances(Account account) throws
 	 * @throws TskCoreException
 	 */
 	public List<Account.Type> getAccountTypesInUse() throws TskCoreException {
-		CaseDbConnection connection = db.getConnection();
-		db.acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
+		
+		String query = "SELECT DISTINCT accounts.account_type_id, type_name, display_name FROM accounts JOIN account_types ON accounts.account_type_id = account_types.account_type_id";
 		List<Account.Type> inUseAccounts = new ArrayList<>();
-
-		try {
-			String query = "SELECT DISTINCT accounts.account_type_id, type_name, display_name FROM accounts JOIN account_types ON accounts.account_type_id = account_types.account_type_id";
-			s = connection.createStatement();
-			rs = connection.executeQuery(s, query); //NON-NLS
-			Account.Type accountType = null;
+		
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, query);) {
+			Account.Type accountType;
 			while (rs.next()) {
 				String accountTypeName = rs.getString("type_name");
 				accountType = this.typeNameToAccountTypeMap.get(accountTypeName);
@@ -1274,9 +1212,6 @@ public List<Account.Type> getAccountTypesInUse() throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting account type id", ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(s);
-			connection.close();
 			db.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -1296,8 +1231,8 @@ public List<Account> getAccountsRelatedToArtifact(BlackboardArtifact artifact) t
 		}
 
 		List<Account> accountList = new ArrayList<>();
+		db.acquireSingleUserCaseReadLock();
 		try (CaseDbConnection connection = db.getConnection()) {
-			db.acquireSingleUserCaseReadLock();
 			try {
 				// In order to get a list of all the unique accounts in a relationship with the given aritfact
 				// we must first union a list of the unique account1_id in the relationship with artifact
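A minimal sketch of the resource-management shape the hunks above converge on, assuming `db` (the SleuthkitCase) and `queryStr` are in scope as they are inside CommunicationsManager:

		// Acquire the case read lock first, then let try-with-resources close the
		// ResultSet, Statement, and CaseDbConnection in reverse order before the
		// lock is released in the finally block.
		db.acquireSingleUserCaseReadLock();
		try (CaseDbConnection connection = db.getConnection();
				Statement s = connection.createStatement();
				ResultSet rs = connection.executeQuery(s, queryStr)) {
			while (rs.next()) {
				// read columns from rs ...
			}
		} catch (SQLException ex) {
			throw new TskCoreException("Error running query. " + ex.getMessage(), ex);
		} finally {
			db.releaseSingleUserCaseReadLock();
		}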
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Content.java b/bindings/java/src/org/sleuthkit/datamodel/Content.java
index caa02b9338ce51eb361d3698d9f2cc61a372f614..39df2ae626069ff0dec5310be61cb7e55873c011 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Content.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Content.java
@@ -1,15 +1,15 @@
 /*
  * Sleuth Kit Data Model
- * 
+ *
  * Copyright 2011-2016 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
- * 
+ *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
  * You may obtain a copy of the License at
- * 
+ *
  *     http://www.apache.org/licenses/LICENSE-2.0
- * 
+ *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -19,6 +19,7 @@
 package org.sleuthkit.datamodel;
 
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.Set;
 
@@ -159,7 +160,9 @@ public interface Content extends SleuthkitVisitableItem {
 	 *         looked up from this)
 	 *
 	 * @throws TskCoreException if critical error occurred within tsk core
+	 * @deprecated Please use newDataArtifact or newAnalysisResult.
 	 */
+	@Deprecated
 	public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException;
 
 	/**
@@ -171,9 +174,108 @@ public interface Content extends SleuthkitVisitableItem {
 	 *         looked up from this)
 	 *
 	 * @throws TskCoreException if critical error occurred within tsk core
+	 * @deprecated Please use newDataArtifact or newAnalysisResult.
 	 */
+	@Deprecated
 	public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException;
 
+	/**
+	 * Create and add an analysis result associated with this content.
+	 *
+	 *
+	 * @param artifactType	  Type of analysis result artifact to create.
+	 * @param score          Score associated with this analysis.
+	 * @param conclusion     Conclusion from the analysis, may be empty.
+	 * @param configuration  Configuration element associated with this
+	 *                       analysis, may be empty.
+	 * @param justification	 Justification
+	 * @param attributesList Additional attributes to attach to this analysis
+	 *                       result artifact.
+	 *
+	 * @return AnalysisResultAdded The analysis result added and the current
+	 *         aggregate score of the content.
+	 *
+	 * @throws TskCoreException if critical error occurred within tsk core.
+	 */
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList) throws TskCoreException;
+
+	/**
+	 * Create and add an analysis result associated with this content.
+	 *
+	 *
+	 * @param artifactType	  Type of analysis result artifact to create.
+	 * @param score          Score associated with this analysis.
+	 * @param conclusion     Conclusion from the analysis, may be empty.
+	 * @param configuration  Configuration element associated with this
+	 *                       analysis, may be empty.
+	 * @param justification	 Justification
+	 * @param attributesList Additional attributes to attach to this analysis
+	 *                       result artifact.
+	 * @param dataSourceId   The object id of the data source for the analysis result.
+	 *
+	 * @return AnalysisResultAdded The analysis result added and the current
+	 *         aggregate score of the content.
+	 *
+	 * @throws TskCoreException if critical error occurred within tsk core.
+	 */
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList, long dataSourceId) throws TskCoreException;
+
+	/**
+	 * Create and add a data artifact associated with this content. This
+	 * method creates the data artifact with the OS account id associated with
+	 * this content, if one exists.
+	 *
+	 * @param artifactType   Type of data artifact to create.
+	 * @param attributesList Additional attributes to attach to this data
+	 *                       artifact.
+	 *
+	 * @return DataArtifact New data artifact.
+	 *
+	 * @throws TskCoreException If a critical error occurred within tsk core.
+	 */
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList) throws TskCoreException;
+	
+	
+	/**
+	 * Create and add a data artifact associated with this content.
+	 *
+	 * @param artifactType   Type of data artifact to create.
+	 * @param attributesList Additional attributes to attach to this data
+	 *                       artifact.
+	 * @param osAccountId    The OS account id associated with the artifact. May
+	 *                       be null.
+	 *
+	 * @return DataArtifact New data artifact.
+	 *
+	 * @throws TskCoreException If a critical error occurred within tsk core.
+	 */
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException;
+
+	/**
+	 * Create and add a data artifact associated with this content.
+	 *
+	 * @param artifactType   Type of data artifact to create.
+	 * @param attributesList Additional attributes to attach to this data
+	 *                       artifact.
+	 * @param osAccountId    The OS account id associated with the artifact. May
+	 *                       be null.
+	 * @param dataSourceId   The data source id of the artifact
+	 *
+	 * @return DataArtifact New data artifact.
+	 *
+	 * @throws TskCoreException If a critical error occurred within tsk core.
+	 */
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId, long dataSourceId) throws TskCoreException;
+
+	/**
+	 * Returns the final score for the content object.
+	 *
+	 * @return Score.
+	 *
+	 * @throws TskCoreException if critical error occurred within tsk core.
+	 */
+	public Score getAggregateScore() throws TskCoreException;
+
 	/**
 	 * Get all artifacts associated with this content that have the given type
 	 * name
@@ -186,6 +288,18 @@ public interface Content extends SleuthkitVisitableItem {
 	 */
 	public ArrayList<BlackboardArtifact> getArtifacts(String artifactTypeName) throws TskCoreException;
 
+	/**
+	 * Get all analysis results associated with this content, that have the
+	 * given type.
+	 *
+	 * @param artifactType Type to look up.
+	 *
+	 * @return A list of analysis result artifacts matching the type.
+	 *
+	 * @throws TskCoreException If critical error occurred within tsk core.
+	 */
+	public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException;
+
 	/**
 	 * Return the TSK_GEN_INFO artifact for the file so that individual
 	 * attributes can be added to it. Creates one if it does not already exist.
@@ -253,6 +367,24 @@ public interface Content extends SleuthkitVisitableItem {
 	 */
 	public ArrayList<BlackboardArtifact> getAllArtifacts() throws TskCoreException;
 
+	/**
+	 * Get all data artifacts associated with this content.
+	 *
+	 * @return A list of data artifacts.
+	 *
+	 * @throws TskCoreException If critical error occurred within tsk core.
+	 */
+	public List<DataArtifact> getAllDataArtifacts() throws TskCoreException;
+		
+	/**
+	 * Get all analysis results associated with this content.
+	 *
+	 * @return A list of analysis results.
+	 *
+	 * @throws TskCoreException If critical error occurred within tsk core.
+	 */
+	public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException;
+
 	/**
 	 * Get the names of all the hashsets that this content is in.
 	 *
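A minimal usage sketch of the Content additions above, assuming an AbstractFile obtained from an open case and a caller-supplied artifact type and Score; the string values are illustrative only:

import java.util.Collections;
import java.util.List;
import org.sleuthkit.datamodel.*;

class ContentArtifactExample {

	// Sketch: adds an analysis result, then reads back results and data artifacts.
	static void addAndReadArtifacts(AbstractFile file, BlackboardArtifact.Type resultType,
			Score score) throws TskCoreException {
		// Add an analysis result with an empty configuration and no extra attributes.
		AnalysisResultAdded added = file.newAnalysisResult(resultType, score,
				"example conclusion", "", "example justification",
				Collections.<BlackboardAttribute>emptyList());

		// The aggregate score reflects all analysis results attached to this content.
		Score aggregate = file.getAggregateScore();

		// Read back results of one type, or everything attached to the file.
		List<AnalysisResult> ofType = file.getAnalysisResults(resultType);
		List<AnalysisResult> all = file.getAllAnalysisResults();
		List<DataArtifact> dataArtifacts = file.getAllDataArtifacts();
	}
}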
diff --git a/bindings/java/src/org/sleuthkit/datamodel/ContentVisitor.java b/bindings/java/src/org/sleuthkit/datamodel/ContentVisitor.java
index 8aa43c6898f6701ec5f8021c6bc9392f63a7358b..7d922b9f126955745f0717dcdfcaccbde3728d71 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/ContentVisitor.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/ContentVisitor.java
@@ -165,6 +165,24 @@ public interface ContentVisitor<T> {
 	 * @return result of the visit
 	 */
 	T visit(Report r);
+	
+	/**
+	 * Act on (visit) an OsAccount object
+	 *
+	 * @param act OsAccount object to visit / act on
+	 *
+	 * @return result of the visit
+	 */
+	T visit(OsAccount act);
+	
+	/**
+	 * Act on (visit) an UnsupportedContent object
+	 *
+	 * @param uc UnsupportedContent object to visit / act on
+	 *
+	 * @return result of the visit
+	 */
+	T visit(UnsupportedContent uc);
 
 	/**
 	 * The default content visitor - quickest method for implementing a custom
@@ -254,5 +272,15 @@ public T visit(BlackboardArtifact ba) {
 		public T visit(Report r) {
 			return defaultVisit(r);
 		}
+		
+		@Override
+		public T visit(OsAccount act) {
+			return defaultVisit(act);
+		}
+		
+		@Override
+		public T visit(UnsupportedContent uc) {
+			return defaultVisit(uc);
+		}
 	}
 }
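A short sketch of how a client visitor can pick up the new overloads; extending the Default adapter means only the cases of interest need overriding (OsAccount.getName() carrying the account name is an assumption of this sketch):

import org.sleuthkit.datamodel.*;

class NameCollector extends ContentVisitor.Default<String> {

	@Override
	protected String defaultVisit(Content c) {
		return c.getName();
	}

	@Override
	public String visit(OsAccount account) {
		return "os-account: " + account.getName();
	}
}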
diff --git a/bindings/java/src/org/sleuthkit/datamodel/DataArtifact.java b/bindings/java/src/org/sleuthkit/datamodel/DataArtifact.java
new file mode 100644
index 0000000000000000000000000000000000000000..ad1ea7d7107c942bdfd026a5c8e0df2051bf8ced
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/DataArtifact.java
@@ -0,0 +1,67 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *	 http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Optional;
+
+/**
+ * DataArtifact is a category of artifact types that are simply data directly
+ * extracted from a data source.
+ */
+public final class DataArtifact extends BlackboardArtifact {
+
+	// Data artifacts may have an OS account associated with them.
+	private final Long osAccountObjId;
+
+	/**
+	 * Constructs a DataArtifact.
+	 *
+	 * @param sleuthkitCase    The SleuthKit case (case database) that contains
+	 *                         the artifact data.
+	 * @param artifactID       The unique id for this artifact.
+	 * @param sourceObjId      The unique id of the content with which this
+	 *                         artifact is associated.
+	 * @param artifactObjId    The object id of artifact, in tsk_objects.
+	 * @param dataSourceObjId  Object ID of the data source where the artifact
+	 *                         was found. May be null.
+	 * @param artifactTypeID   The type id of this artifact.
+	 * @param artifactTypeName The type name of this artifact.
+	 * @param displayName      The display name of this artifact.
+	 * @param reviewStatus     The review status of this artifact.
+	 * @param osAccountObjId   OsAccount associated with this artifact, may be
+	 *                         null.
+	 * @param isNew            The object is newly created.
+	 */
+	DataArtifact(SleuthkitCase sleuthkitCase, long artifactID, long sourceObjId, long artifactObjId, Long dataSourceObjId, int artifactTypeID, String artifactTypeName, String displayName, ReviewStatus reviewStatus, Long osAccountObjId, boolean isNew) {
+		super(sleuthkitCase, artifactID, sourceObjId, artifactObjId, dataSourceObjId, artifactTypeID, artifactTypeName, displayName, reviewStatus, isNew);
+		this.osAccountObjId = osAccountObjId;
+	}
+
+	/**
+	 * Gets the object id of the OS account associated with this artifact.
+	 *
+	 * @return Optional with the OS account object id, Optional.empty if there is no account.
+	 *
+	 * @throws TskCoreException If there is an error getting the account.
+	 */
+	public Optional<Long> getOsAccountObjectId() throws TskCoreException {
+		return Optional.ofNullable(osAccountObjId);
+	}
+
+}
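A brief sketch of consuming the Optional-based accessor defined above; `artifact` is assumed to come from one of the new newDataArtifact calls:

import java.util.Optional;
import org.sleuthkit.datamodel.DataArtifact;
import org.sleuthkit.datamodel.TskCoreException;

class DataArtifactExample {

	static void printOwner(DataArtifact artifact) throws TskCoreException {
		Optional<Long> osAccountObjId = artifact.getOsAccountObjectId();
		if (osAccountObjId.isPresent()) {
			System.out.println("Owned by OS account obj id " + osAccountObjId.get());
		} else {
			System.out.println("No OS account associated with this artifact");
		}
	}
}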
diff --git a/bindings/java/src/org/sleuthkit/datamodel/DataSource.java b/bindings/java/src/org/sleuthkit/datamodel/DataSource.java
index f2633a1172998e3efc47c9dda9502d8aaae78d3f..416abfa3f86a3bb334fc03d07a1d9598212b8975 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/DataSource.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/DataSource.java
@@ -74,6 +74,18 @@ public interface DataSource extends Content {
 	 * @throws TskCoreException Thrown if the data can not be written
 	 */
 	void setAcquisitionDetails(String details) throws TskCoreException;
+
+	/**
+	 * Sets the acquisition tool details such as its name, version number and
+	 * any settings used during the acquisition to acquire data.
+	 *
+	 * @param name     The name of the acquisition tool. May be NULL.
+	 * @param version  The acquisition tool version number. May be NULL.
+	 * @param settings The settings used by the acquisition tool. May be NULL.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be written
+	 */
+	void setAcquisitionToolDetails(String name, String version, String settings) throws TskCoreException;
 	
 	/**
 	 * Gets the acquisition details field from the case database.
@@ -83,4 +95,49 @@ public interface DataSource extends Content {
 	 * @throws TskCoreException Thrown if the data can not be read
 	 */
 	String getAcquisitionDetails() throws TskCoreException;
+
+	/**
+	 * Gets the acquisition tool settings field from the case database.
+	 *
+	 * @return The acquisition tool settings. May be Null if not set.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	String getAcquisitionToolSettings() throws TskCoreException;
+
+	/**
+	 * Gets the acquisition tool name field from the case database.
+	 *
+	 * @return The acquisition tool name. May be Null if not set.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	String getAcquisitionToolName() throws TskCoreException;
+
+	/**
+	 * Gets the acquisition tool version field from the case database.
+	 *
+	 * @return The acquisition tool version. May be Null if not set. 
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	String getAcquisitionToolVersion() throws TskCoreException;
+
+	/**
+	 * Gets the added date field from the case database.
+	 *
+	 * @return The date time when the image was added in epoch seconds.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	Long getDateAdded() throws TskCoreException;
+	
+	/**
+	 * Gets the host for this data source.
+	 * 
+	 * @return The host
+	 * 
+	 * @throws TskCoreException 
+	 */
+	Host getHost() throws TskCoreException;
 }
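A hedged sketch of the new DataSource accessors declared above; every acquisition tool field may be null when nothing was recorded, so the caller guards accordingly:

import org.sleuthkit.datamodel.DataSource;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.TskCoreException;

class DataSourceInfoExample {

	static String describe(DataSource ds) throws TskCoreException {
		String tool = ds.getAcquisitionToolName();        // may be null
		String version = ds.getAcquisitionToolVersion();  // may be null
		Long added = ds.getDateAdded();                   // epoch seconds
		Host host = ds.getHost();
		return String.format("host=%s tool=%s version=%s added=%s",
				host.getName(),
				tool != null ? tool : "unknown",
				version != null ? version : "unknown",
				added);
	}
}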
diff --git a/bindings/java/src/org/sleuthkit/datamodel/DerivedFile.java b/bindings/java/src/org/sleuthkit/datamodel/DerivedFile.java
index ef147fb10c980eb874594e42931342e0ba336dd2..35a52ab4fd65c202ebd511b3edb56a780d1278d9 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/DerivedFile.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/DerivedFile.java
@@ -19,6 +19,7 @@
 package org.sleuthkit.datamodel;
 
 import java.text.MessageFormat;
+import java.util.Collections;
 import java.util.ResourceBundle;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -82,6 +83,9 @@ public class DerivedFile extends AbstractFile {
 	 * @param encodingType		     The encoding type of the file.
 	 * @param extension          The extension part of the file name (not
 	 *                           including the '.'), can be null.
+	 * @param ownerUid			 UID of the file owner as found in the file
+	 *                           system, can be null.
+	 * @param osAccountObjId	 Obj id of the owner OS account, may be null.
 	 */
 	DerivedFile(SleuthkitCase db,
 			long objId,
@@ -97,12 +101,14 @@ public class DerivedFile extends AbstractFile {
 			long parentId,
 			String mimeType,
 			TskData.EncodingType encodingType,
-			String extension) {
+			String extension, 
+			String ownerUid,
+			Long osAccountObjId) {
 		// TODO (AUT-1904): The parent id should be passed to AbstractContent 
 		// through the class hierarchy contructors.
 		super(db, objId, dataSourceObjectId, TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0,
-				name, TSK_DB_FILES_TYPE_ENUM.LOCAL, 0L, 0, dirType, metaType, dirFlag,
-				metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension);
+				name, TSK_DB_FILES_TYPE_ENUM.DERIVED, 0L, 0, dirType, metaType, dirFlag,
+				metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, Collections.emptyList());
 		setLocalFilePath(localPath);
 		setEncodingType(encodingType);
 	}
@@ -306,7 +312,7 @@ protected DerivedFile(SleuthkitCase db,
 		this(db, objId, db.getDataSourceObjectId(objId), name, dirType, metaType, dirFlag, metaFlags, size,
 				ctime, crtime, atime, mtime,
 				md5Hash, null, knownState,
-				parentPath, localPath, parentId, null, TskData.EncodingType.NONE, null);
+				parentPath, localPath, parentId, null, TskData.EncodingType.NONE, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT);
 	}
 
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Directory.java b/bindings/java/src/org/sleuthkit/datamodel/Directory.java
index 39b0c62188ef261354eedeb1596ba6f6f2fd0e6f..f0d376b381093971ca36fde7624365117502bda0 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Directory.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Directory.java
@@ -18,6 +18,7 @@
  */
 package org.sleuthkit.datamodel;
 
+import java.util.Collections;
 import org.sleuthkit.datamodel.TskData.FileKnown;
 import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
@@ -72,6 +73,9 @@ public class Directory extends FsContent {
 	 * @param knownState         The known state of the file from a hash
 	 *                           database lookup, null if not yet looked up.
 	 * @param parentPath         The path of the parent of the file.
+	 * @param ownerUid			 UID of the file owner as found in the file
+	 *                           system, can be null.
+	 * @param osAccountObjId	 Obj id of the owner OS account, may be null.
 	 */
 	Directory(SleuthkitCase db,
 			long objId,
@@ -85,8 +89,9 @@ public class Directory extends FsContent {
 			long size,
 			long ctime, long crtime, long atime, long mtime,
 			short modes, int uid, int gid,
-			String md5Hash, String sha256Hash, FileKnown knownState, String parentPath) {
-		super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, null, null);
+			String md5Hash, String sha256Hash, FileKnown knownState, String parentPath, 
+			String ownerUid, Long osAccountObjId ) {
+		super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, null, null, ownerUid, osAccountObjId, Collections.emptyList());
 	}
 
 	/**
@@ -247,6 +252,6 @@ protected Directory(SleuthkitCase db,
 			long ctime, long crtime, long atime, long mtime,
 			short modes, int uid, int gid,
 			String md5Hash, FileKnown knownState, String parentPath) {
-		this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath);
+		this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT);
 	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/File.java b/bindings/java/src/org/sleuthkit/datamodel/File.java
index c6758c0ff88657acb513b55810acbc445b518df3..659ed70acdee84c323b760f6a1a7f7915be6196b 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/File.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/File.java
@@ -18,6 +18,8 @@
  */
 package org.sleuthkit.datamodel;
 
+import java.util.Collections;
+import java.util.List;
 import org.sleuthkit.datamodel.TskData.FileKnown;
 import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
@@ -76,6 +78,9 @@ public class File extends FsContent {
 	 *                           yet been determined.
 	 * @param extension	         The extension part of the file name (not
 	 *                           including the '.'), can be null.
+	 * @param ownerUid			 UID of the file owner as found in the file
+	 *                           system, can be null.
+	 * @param osAccountObjId	 Obj id of the owner OS account, may be null.
 	 */
 	File(SleuthkitCase db,
 			long objId,
@@ -90,8 +95,11 @@ public class File extends FsContent {
 			long ctime, long crtime, long atime, long mtime,
 			short modes, int uid, int gid,
 			String md5Hash, String sha256Hash, FileKnown knownState, String parentPath, String mimeType,
-			String extension) {
-		super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension);
+			String extension,
+			String ownerUid,
+			Long osAccountObjId,
+			List<Attribute> fileAttributes) {
+		super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, fileAttributes);
 	}
 
 	/**
@@ -245,6 +253,6 @@ protected File(SleuthkitCase db,
 			String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType,
 			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime,
 			short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath, String mimeType) {
-		this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, mimeType, null);
+		this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, mimeType, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList());
 	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/FileManager.java b/bindings/java/src/org/sleuthkit/datamodel/FileManager.java
new file mode 100644
index 0000000000000000000000000000000000000000..98f04c85586d6955d94628338d2e2e289d7f161b
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/FileManager.java
@@ -0,0 +1,139 @@
+/*
+ * SleuthKit Java Bindings
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Utility class for file-based database queries.
+ */
+public class FileManager {
+
+	private final SleuthkitCase skCase;
+
+	/**
+	 * Constructs a FileManager.
+	 *
+	 * @param skCase The case database.
+	 */
+	FileManager(SleuthkitCase skCase) {
+		this.skCase = Objects.requireNonNull(skCase, "Cannot create FileManager for null SleuthkitCase");
+	}
+	
+	/**
+     * Find all files with the exact given name and parentId.
+     * 
+     * @param parentId Id of the parent folder to search.
+     * @param name Exact file name to match.
+     * 
+     * @return A list of matching files.
+     * 
+     * @throws TskCoreException 
+     */
+    public List<AbstractFile> findFilesExactName(long parentId, String name) throws TskCoreException {
+		String ext = SleuthkitCase.extractExtension(name);
+						
+		String query = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id "
+				+ " WHERE tsk_objects.par_obj_id = ? AND tsk_files.name = ? ";
+		
+		if (!ext.isEmpty()) {
+			query += " AND tsk_files.extension = ? ";
+		}
+		
+		skCase.acquireSingleUserCaseReadLock();
+		try (SleuthkitCase.CaseDbConnection connection = skCase.getConnection()) {
+			PreparedStatement statement = connection.getPreparedStatement(query, Statement.RETURN_GENERATED_KEYS);
+			statement.clearParameters();
+			statement.setLong(1, parentId);
+			statement.setString(2, name);
+			
+			if (!ext.isEmpty()) {
+				statement.setString(3, ext);
+			}
+			
+			try (ResultSet rs = connection.executeQuery(statement)) {
+				return skCase.resultSetToAbstractFiles(rs, connection);
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException("SQLException thrown when calling query: " + query + " for parentID = " + parentId + " and name " + name, ex);
+		} finally {
+			skCase.releaseSingleUserCaseReadLock();
+		}
+    }
+	
+	/**
+     * Find all files with the exact given name and exact parent path.
+     * 
+	 * @param dataSource The data source to search within.
+     * @param name Exact file name to match.
+	 * @param path Exact parent path.
+     * 
+     * @return A list of matching files.
+     * 
+     * @throws TskCoreException 
+     */
+	public List<AbstractFile> findFilesExactNameExactPath(Content dataSource, String name, String path) throws TskCoreException {
+		
+		// Database paths will always start and end with a forward slash, so add those if not present
+		String normalizedPath = path;
+		if (!normalizedPath.startsWith("/")) {
+			normalizedPath = "/" + normalizedPath;
+		}
+		if (!normalizedPath.endsWith("/")) {
+			normalizedPath = normalizedPath + "/";
+		}
+		
+		String ext = SleuthkitCase.extractExtension(name);
+		
+		String query = "";
+		skCase.acquireSingleUserCaseReadLock();
+		try (SleuthkitCase.CaseDbConnection connection = skCase.getConnection()) {
+			PreparedStatement statement;
+			if (ext.isEmpty()) {
+				query = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE parent_path = ? AND name = ? AND data_source_obj_id = ?";
+				statement = connection.getPreparedStatement(query, Statement.RETURN_GENERATED_KEYS);
+				statement.clearParameters();
+				statement.setString(1, normalizedPath);
+				statement.setString(2, name);
+				statement.setLong(3, dataSource.getId());
+			} else {
+				// This is done as an optimization since the extension column in tsk_files is indexed
+				query = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE extension = ? AND parent_path = ? AND name = ? AND data_source_obj_id = ?";
+				statement = connection.getPreparedStatement(query, Statement.RETURN_GENERATED_KEYS);
+				statement.clearParameters();
+				statement.setString(1, ext);
+				statement.setString(2, normalizedPath);
+				statement.setString(3, name);
+				statement.setLong(4, dataSource.getId());
+			}
+			try (ResultSet rs = connection.executeQuery(statement)) {
+				return skCase.resultSetToAbstractFiles(rs, connection);
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException("SQLException thrown when calling query: " + query + " for parent path = " + path + " and name " + name, ex);
+		} finally {
+			skCase.releaseSingleUserCaseReadLock();
+		}
+	}
+}
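A minimal usage sketch for the FileManager lookups above; the FileManager instance and data source are assumed to be supplied by the caller, and the registry path is only an example:

import java.util.List;
import org.sleuthkit.datamodel.*;

class FileLookupExample {

	static void findConfigHive(FileManager fileManager, Content dataSource) throws TskCoreException {
		// Exact name plus exact parent path; leading and trailing slashes are normalized internally.
		List<AbstractFile> hits = fileManager.findFilesExactNameExactPath(
				dataSource, "SOFTWARE", "/Windows/System32/config");
		for (AbstractFile f : hits) {
			System.out.println(f.getUniquePath());
		}
	}
}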
diff --git a/bindings/java/src/org/sleuthkit/datamodel/FsContent.java b/bindings/java/src/org/sleuthkit/datamodel/FsContent.java
index 7e022bc81776885bf73c65fb3e38f368e44613f2..1e077d83c48f85232730f3e8dca5167d7fa447b2 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/FsContent.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/FsContent.java
@@ -19,6 +19,7 @@
 package org.sleuthkit.datamodel;
 
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.List;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -105,6 +106,9 @@ public abstract class FsContent extends AbstractFile {
 	 *                           yet been determined.
 	 * @param extension          The extension part of the file name (not
 	 *                           including the '.'), can be null.
+	 * @param ownerUid			 UID of the file owner as found in the file
+	 *                           system, can be null.
+	 * @param osAccountObjId	 Obj id of the owner OS account, may be null.
 	 */
 	@SuppressWarnings("deprecation")
 	FsContent(SleuthkitCase db,
@@ -123,8 +127,11 @@ public abstract class FsContent extends AbstractFile {
 			String md5Hash, String sha256Hash, FileKnown knownState,
 			String parentPath,
 			String mimeType,
-			String extension) {
-		super(db, objId, dataSourceObjectId, attrType, attrId, name, fileType, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension);
+			String extension,
+			String ownerUid,
+			Long osAccountObjId,
+			List<Attribute> fileAttributes) {
+		super(db, objId, dataSourceObjectId, attrType, attrId, name, fileType, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, fileAttributes);
 		this.fsObjId = fsObjId;
 	}
 
@@ -385,7 +392,7 @@ public String toString(boolean preserveState) {
 			String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType,
 			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime,
 			short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath) {
-		this(db, objId, db.getDataSourceObjectId(objId), fsObjId, attrType, (int) attrId, name, TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, null, null);
+		this(db, objId, db.getDataSourceObjectId(objId), fsObjId, attrType, (int) attrId, name, TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, null, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList() );
 	}
 
 	/**
@@ -444,6 +451,6 @@ public String toString(boolean preserveState) {
 			String name, long metaAddr, int metaSeq, TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType,
 			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, long ctime, long crtime, long atime, long mtime,
 			short modes, int uid, int gid, String md5Hash, FileKnown knownState, String parentPath, String mimeType) {
-		this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, mimeType, null);
+		this(db, objId, dataSourceObjectId, fsObjId, attrType, (int) attrId, name, TSK_DB_FILES_TYPE_ENUM.FS, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, null, knownState, parentPath, mimeType, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList());
 	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Host.java b/bindings/java/src/org/sleuthkit/datamodel/Host.java
new file mode 100644
index 0000000000000000000000000000000000000000..35f4eaa58fd0b79257fc68bb7752a6dab77b727b
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/Host.java
@@ -0,0 +1,135 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Objects;
+
+/**
+ * Encapsulates a host.
+ */
+public final class Host {
+
+	private final long id;
+	private final String name;
+	private final HostDbStatus status;
+
+	Host(long id, String name) {
+		this(id, name, HostDbStatus.ACTIVE);
+	}
+
+	Host(long id, String name, HostDbStatus status) {
+		this.id = id;
+		this.name = name;
+		this.status = status;
+	}
+
+	/**
+	 * Gets the row id for the host.
+	 *
+	 * @return Row id.
+	 */
+	public long getHostId() {
+		return id;
+	}
+
+	/**
+	 * Gets the name for the host.
+	 *
+	 * @return Host name.
+	 */
+	public String getName() {
+		return name;
+	}
+
+	/**
+	 * Gets the status for the host.
+	 *
+	 * @return Host status.
+	 */
+	HostDbStatus getStatus() {
+		return status;
+	}
+
+	@Override
+	public int hashCode() {
+		int hash = 5;
+		hash = 67 * hash + (int) (this.id ^ (this.id >>> 32));
+		hash = 67 * hash + Objects.hashCode(this.name);
+		return hash;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj) {
+			return true;
+		}
+		if (obj == null) {
+			return false;
+		}
+		if (getClass() != obj.getClass()) {
+			return false;
+		}
+
+		final Host other = (Host) obj;
+		if (this.id != other.id) {
+			return false;
+		}
+
+		if ((this.name == null) ? (other.name != null) : !this.name.equals(other.name)) {
+			return false;
+		}
+
+		return true;
+	}
+
+	/**
+	 * Encapsulates status of host row.
+	 */
+	enum HostDbStatus {
+		ACTIVE(0, "Active"),
+		MERGED(1, "Merged"),
+		DELETED(2, "Deleted");
+
+		private final int id;
+		private final String name;
+
+		HostDbStatus(int id, String name) {
+			this.id = id;
+			this.name = name;
+		}
+
+		int getId() {
+			return id;
+		}
+
+		String getName() {
+			return name;
+		}
+
+		static HostDbStatus fromID(int typeId) {
+			for (HostDbStatus type : HostDbStatus.values()) {
+				if (type.ordinal() == typeId) {
+					return type;
+				}
+			}
+			return null;
+		}
+	}
+
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/HostAddress.java b/bindings/java/src/org/sleuthkit/datamodel/HostAddress.java
new file mode 100644
index 0000000000000000000000000000000000000000..4b545baee03e78c3acffffa4bc79432bcdafb160
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/HostAddress.java
@@ -0,0 +1,174 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Objects;
+
+/**
+ * Abstracts an address associated with a host. A host may have multiple
+ * addresses of different types associated with it at a given time.
+ */
+public final class HostAddress extends AbstractContent {
+
+	private final SleuthkitCase sleuthkitCase;
+	private final long id;
+	private final HostAddressType addressType;
+	private final String address;
+
+	/**
+	 * Create a HostAddress object.
+	 *
+	 * @param skCase  Case the host address belongs to.
+	 * @param id      Id of the host address in the database.
+	 * @param type    Type of host address.
+	 * @param address The host address value.
+	 */
+	HostAddress(SleuthkitCase skCase, long id, HostAddressType type, String address) {
+		super(skCase, id, address + "(" + type.getName() + ")");
+		this.sleuthkitCase = skCase;
+		this.id = id;
+		this.addressType = type;
+		this.address = address;
+	}
+
+	@Override
+	public long getId() {
+		return id;
+	}
+
+	public HostAddressType getAddressType() {
+		return addressType;
+	}
+
+	public String getAddress() {
+		return address;
+	}
+
+	@Override
+	public int hashCode() {
+		int hash = 7;
+		hash = 53 * hash + (int) (this.id ^ (this.id >>> 32));
+		hash = 53 * hash + Objects.hashCode(this.addressType);
+		hash = 53 * hash + Objects.hashCode(this.address);
+		return hash;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj) {
+			return true;
+		}
+		if (obj == null) {
+			return false;
+		}
+		if (getClass() != obj.getClass()) {
+			return false;
+		}
+		final HostAddress other = (HostAddress) obj;
+		if (this.id != other.id) {
+			return false;
+		}
+
+		if (this.addressType != other.addressType) {
+			return false;
+		}
+
+		if ((this.address == null) ? (other.address != null) : !this.address.equals(other.address)) {
+			return false;
+		}
+
+		return true;
+	}
+
+	/**
+	 * Gets the SleuthKit case database for this host address.
+	 *
+	 * @return The SleuthKit case object.
+	 */
+	@Override
+	public SleuthkitCase getSleuthkitCase() {
+		return sleuthkitCase;
+	}
+
+	@Override
+	public int read(byte[] buf, long offset, long len) throws TskCoreException {
+		// No data to read. 
+		return 0;
+	}
+
+	@Override
+	public void close() {
+		// Nothing to close
+	}
+
+	@Override
+	public long getSize() {
+		return 0;
+	}
+
+	@Override
+	public <T> T accept(ContentVisitor<T> v) {
+		// TODO		
+		throw new UnsupportedOperationException("Not supported yet.");
+	}
+
+	@Override
+	public <T> T accept(SleuthkitItemVisitor<T> v) {
+		// TODO
+		throw new UnsupportedOperationException("Not supported yet.");
+	}
+
+	/**
+	 * A host may have different types of addresses at a given point in time.
+	 */
+	public enum HostAddressType {
+		DNS_AUTO(0, "DNS Auto Detection"), // Used to auto-select the DNS type from HOSTNAME, IPV4, and IPV6 when creating HostAddresses
+		HOSTNAME(1, "Host Name"),
+		IPV4(2, "IPv4"),
+		IPV6(3, "IPv6"),
+		ETHERNET_MAC(4, "Ethernet MAC"),
+		WIFI_MAC(5, "WiFi MAC"),
+		BLUETOOTH_MAC(6, "BlueTooth MAC");
+
+		private final int id;
+		private final String name;
+
+		HostAddressType(int id, String name) {
+			this.id = id;
+			this.name = name;
+		}
+
+		public int getId() {
+			return id;
+		}
+
+		String getName() {
+			return name;
+		}
+
+		public static HostAddressType fromID(int typeId) {
+			for (HostAddressType type : HostAddressType.values()) {
+				if (type.ordinal() == typeId) {
+					return type;
+				}
+			}
+			return null;
+		}
+	}
+}
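A short sketch tying HostAddress to the HostAddressManager defined further below; the manager instance is assumed to be supplied by the caller, and DNS_AUTO lets the manager choose between host name, IPv4, and IPv6:

import java.util.Optional;
import org.sleuthkit.datamodel.*;

class HostAddressExample {

	// Reuse an existing address row if one matches, otherwise insert a new one.
	static HostAddress ensureAddress(HostAddressManager manager, String value) throws TskCoreException {
		Optional<HostAddress> existing =
				manager.getHostAddress(HostAddress.HostAddressType.DNS_AUTO, value);
		return existing.isPresent()
				? existing.get()
				: manager.newHostAddress(HostAddress.HostAddressType.DNS_AUTO, value);
	}
}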
diff --git a/bindings/java/src/org/sleuthkit/datamodel/HostAddressManager.java b/bindings/java/src/org/sleuthkit/datamodel/HostAddressManager.java
new file mode 100644
index 0000000000000000000000000000000000000000..06381ff85c8c512e201639a4dafc001880aefa22
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/HostAddressManager.java
@@ -0,0 +1,777 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.regex.Pattern;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.HostAddress.HostAddressType;
+
+/**
+ * Responsible for creating/updating/retrieving host addresses.
+ */
+public class HostAddressManager {
+
+	private static final Logger LOGGER = Logger.getLogger(HostAddressManager.class.getName());
+
+	private final SleuthkitCase db;
+
+	/**
+	 * A HostAddress object id entry is maintained in this cache when a
+	 * host address and IP mapping is added. This is here to improve the
+	 * performance of the {@link #hostNameAndIpMappingExists(long) } check.
+	 */
+	private final Cache<Long, Byte> recentHostNameAndIpMappingCache = CacheBuilder.newBuilder().maximumSize(200000).build();
+
+	/**
+	 * Recently added or accessed Host Address Objects are cached. This is
+	 * here to improve performance of the
+	 * {@link #hostAddressExists(org.sleuthkit.datamodel.HostAddress.HostAddressType, java.lang.String)}
+	 * check as well as the {@link #getHostAddress(org.sleuthkit.datamodel.HostAddress.HostAddressType, java.lang.String) } lookup.
+	 */
+	private final Cache<String, HostAddress> recentHostAddressCache = CacheBuilder.newBuilder().maximumSize(200000).build();
+
+	/**
+	 * Recently added host address usage is cached. This is intended to improve 
+	 * the performance of {@link #addUsage(org.sleuthkit.datamodel.Content, org.sleuthkit.datamodel.HostAddress) }.
+	 * Key: DatasourceId # Host Id # Content Id. The value has no significance; it is set to true when there is
+	 * an entry in the cache for the key.
+	 */
+	private final Cache<String, Boolean> hostAddressUsageCache = CacheBuilder.newBuilder().maximumSize(200000).build();
+
+	/**
+	 * Construct a HostAddressManager for the given SleuthkitCase.
+	 *
+	 * @param skCase The SleuthkitCase
+	 *
+	 */
+	HostAddressManager(SleuthkitCase skCase) {
+		this.db = skCase;
+	}
+
+	/**
+	 * Gets an address record with given type and address.
+	 *
+	 * @param type    Address type.
+	 * @param address Address.
+	 *
+	 * @return Matching address.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Optional<HostAddress> getHostAddress(HostAddress.HostAddressType type, String address) throws TskCoreException {
+		
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			return HostAddressManager.this.getHostAddress(type, address, connection);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets an address record with given type and address.
+	 *
+	 * @param type       Address type.
+	 * @param address    Address.
+	 * @param connection Connection to use for DB operation.
+	 *
+	 * @return Matching address.
+	 *
+	 * @throws TskCoreException
+	 */
+	private Optional<HostAddress> getHostAddress(HostAddress.HostAddressType type, String address, CaseDbConnection connection) throws TskCoreException {
+		
+		HostAddress hostAddress = recentHostAddressCache.getIfPresent(createRecentHostAddressKey(type, address));
+		if (Objects.nonNull(hostAddress)) {
+			return Optional.of(hostAddress);
+		}
+		HostAddress.HostAddressType addressType = type;
+		if (type.equals(HostAddress.HostAddressType.DNS_AUTO)) {
+			addressType = getDNSType(address);
+		}
+		String queryString = "SELECT * FROM tsk_host_addresses"
+				+ " WHERE address = ?  AND address_type = ?";			
+		try {
+			PreparedStatement query = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);
+			query.clearParameters();
+			query.setString(1, address.toLowerCase());
+			query.setInt(2, addressType.getId());
+			try (ResultSet rs = query.executeQuery()) {
+				if (!rs.next()) {
+					return Optional.empty();	// no match found
+				} else {
+					HostAddress newHostAddress = new HostAddress(db, rs.getLong("id"), HostAddressType.fromID(rs.getInt("address_type")), address);
+					recentHostAddressCache.put(createRecentHostAddressKey(newHostAddress.getAddressType(), address), newHostAddress);
+					return Optional.of(newHostAddress);					
+				}
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host address with type = %s and address = %s", type.getName(), address), ex);
+		} 
+	}
+
+	/**
+	 * Create a key string for use as a cache key.
+	 *
+	 * @param type    Address type.
+	 * @param address Address.
+	 *
+	 * @return Cache key defined as typeId + # + address lowercased.
+	 */
+	private String createRecentHostAddressKey(HostAddressType type, String address) {
+		return createRecentHostAddressKey(type.getId(), address);
+	}
+
+	/**
+	 * Create a key string for use as a cache key.
+	 *
+	 * @param typeId  Address type Id.
+	 * @param address Address.
+	 *
+	 * @return Cache key defined as typeId + # + address lowercased.
+	 */
+	private String createRecentHostAddressKey(int typeId, String address) {
+		return typeId + "#" + address.toLowerCase();
+	}
+
+	/**
+	 * Add a new address with the given type and address. If the address already
+	 * exists in the database, the existing entry will be returned.
+	 *
+	 * @param type    Address type.
+	 * @param address Address (case-insensitive).
+	 *
+	 * @return HostAddress
+	 *
+	 * @throws TskCoreException
+	 */
+	public HostAddress newHostAddress(HostAddress.HostAddressType type, String address) throws TskCoreException {
+		db.acquireSingleUserCaseWriteLock();
+		CaseDbConnection connection = this.db.getConnection();
+		try {
+			return HostAddressManager.this.newHostAddress(type, address, connection);
+		} catch (TskCoreException ex) {
+			// The insert may have failed because the HostAddress already exists, so
+			// try loading it from the database.
+			Optional<HostAddress> hostAddress = HostAddressManager.this.getHostAddress(type, address, connection);
+			if (hostAddress.isPresent()) {
+				return hostAddress.get();
+			}
+			throw ex;
+		} finally {
+			connection.close(); 
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Insert a row in the tsk_host_addresses with the given type and address.
+	 *
+	 * @param type       Address type.
+	 * @param address    Address.
+	 * @param connection Database connection to use.
+	 *
+	 * @return HostAddress.
+	 *
+	 * @throws TskCoreException
+	 */
+	private HostAddress newHostAddress(HostAddress.HostAddressType type, String address, CaseDbConnection connection) throws TskCoreException {
+		HostAddress.HostAddressType addressType = type;
+		if (type.equals(HostAddress.HostAddressType.DNS_AUTO)) {
+			addressType = getDNSType(address);
+		}
+		
+		try {
+
+			// TODO: need to get the correct parent obj id.  
+			long parentObjId = 0;
+			int objTypeId = TskData.ObjectType.HOST_ADDRESS.getObjectType();
+
+			long objId = db.addObject(parentObjId, objTypeId, connection);
+
+			String hostAddressInsertSQL = "INSERT INTO tsk_host_addresses(id, address_type, address) VALUES (?, ?, ?)"; // NON-NLS
+			PreparedStatement preparedStatement = connection.getPreparedStatement(hostAddressInsertSQL, Statement.RETURN_GENERATED_KEYS);
+
+			preparedStatement.clearParameters();
+			preparedStatement.setLong(1, objId);
+			preparedStatement.setInt(2, addressType.getId());
+			preparedStatement.setString(3, address.toLowerCase());
+
+			connection.executeUpdate(preparedStatement);
+			HostAddress hostAddress =  new HostAddress(db, objId, addressType, address);
+			recentHostAddressCache.put(createRecentHostAddressKey(addressType, address), hostAddress);
+			return hostAddress;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error adding host address of type = %s, with address = %s", type.getName(), address), ex);
+		} 
+	}
+
+	/**
+	 * Add a host to address mapping.
+	 *
+	 * @param host	       Host.
+	 * @param hostAddress Address.
+	 * @param time        Time at which the mapping was valid.
+	 * @param source      Content from where this mapping was derived.
+	 *
+	 * @throws TskCoreException
+	 */
+	public void assignHostToAddress(Host host, HostAddress hostAddress, Long time, Content source) throws TskCoreException {
+
+		String insertSQL = db.getInsertOrIgnoreSQL(" INTO tsk_host_address_map(host_id, addr_obj_id, source_obj_id, time) "
+				+ " VALUES(?, ?, ?, ?) ");
+
+		db.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = this.db.getConnection()) {
+
+			PreparedStatement preparedStatement = connection.getPreparedStatement(insertSQL, Statement.NO_GENERATED_KEYS);
+
+			preparedStatement.clearParameters();
+			preparedStatement.setLong(1, host.getHostId());
+			preparedStatement.setLong(2, hostAddress.getId());
+			preparedStatement.setLong(3, source.getId());
+			if (time != null) {
+				preparedStatement.setLong(4, time);
+			} else {
+				preparedStatement.setNull(4, java.sql.Types.BIGINT);
+			}
+
+			connection.executeUpdate(preparedStatement);
+		} catch (SQLException ex) {
+			LOGGER.log(Level.SEVERE, null, ex);
+			throw new TskCoreException(String.format("Error adding host address mapping for host name = %s,  with address = %s", host.getName(), hostAddress.getAddress()), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Get all the addresses that have been assigned to the given host.
+	 *
+	 * @param host Host to get addresses for.
+	 *
+	 * @return List of addresses, may be empty.
+	 */
+	List<HostAddress> getHostAddressesAssignedTo(Host host) throws TskCoreException {
+
+		String queryString = "SELECT addr_obj_id FROM tsk_host_address_map "
+				+ " WHERE host_id = " + host.getHostId();
+
+		List<HostAddress> addresses = new ArrayList<>();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			while (rs.next()) {
+				addresses.add(HostAddressManager.this.getHostAddress(rs.getLong("addr_obj_id"), connection));
+			}
+
+			return addresses;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host addresses for host %s", host.getName()), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets an address for the given object id.
+	 *
+	 * @param id Object id.
+	 *
+	 * @return The corresponding HostAddress object.
+	 *
+	 * @throws TskCoreException
+	 */
+	public HostAddress getHostAddress(long id) throws TskCoreException {
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			return HostAddressManager.this.getHostAddress(id, connection);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets an address for the given object id.
+	 *
+	 * @param id         Id of the host address.
+	 * @param connection Current connection
+	 *
+	 * @return The corresponding HostAddress.
+	 *
+	 * @throws TskCoreException
+	 */
+	private HostAddress getHostAddress(long id, CaseDbConnection connection) throws TskCoreException {
+		String queryString = "SELECT * FROM tsk_host_addresses"
+				+ " WHERE id = " + id;
+
+		try (Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (!rs.next()) {
+				throw new TskCoreException(String.format("No address found with id = %d", id));
+			} else {
+				long objId = rs.getLong("id");
+				int type = rs.getInt("address_type");
+				String address = rs.getString("address");
+				HostAddress hostAddress = new HostAddress(db, objId, HostAddress.HostAddressType.fromID(type), address);
+				recentHostAddressCache.put(createRecentHostAddressKey(type, address), hostAddress);
+				return hostAddress;
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host address with id = %d", id), ex);
+		} 
+	}
+
+	/**
+	 * Adds a row to the host address dns ip map table.
+	 *
+	 * @param dnsNameAddress The DNS name.
+	 * @param ipAddress      An IP address associated with the DNS name.
+	 * @param time           Timestamp when this relationship was true.
+	 * @param source         The source.
+	 *
+	 * @throws TskCoreException
+	 */
+	public void addHostNameAndIpMapping(HostAddress dnsNameAddress, HostAddress ipAddress, Long time, Content source) throws TskCoreException {
+
+		db.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			addHostNameAndIpMapping(dnsNameAddress, ipAddress, time, source, connection);
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error adding host DNS address mapping for DNS name = %s, and IP address = %s", dnsNameAddress.getAddress(), ipAddress.getAddress()), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Adds a row to the host address dns ip map table.
+	 *
+	 * @param dnsNameAddress    The DNS name.
+	 * @param ipAddress         An IP address associated with the DNS name.
+	 * @param time              Timestamp when this relationship was true.
+	 * @param source            The source.
+	 * @param caseDbTransaction The transaction in the scope of which the
+	 *                          operation is to be performed, managed by the
+	 *                          caller. Null is not permitted.
+	 *
+	 * @throws TskCoreException
+	 */
+	public void addHostNameAndIpMapping(HostAddress dnsNameAddress, HostAddress ipAddress, Long time, Content source, final SleuthkitCase.CaseDbTransaction caseDbTransaction) throws TskCoreException {
+
+		if (Objects.isNull(caseDbTransaction)) {
+			throw new TskCoreException(String.format("Error adding host DNS address mapping for DNS name = %s, and IP address = %s, null caseDbTransaction passed to addHostNameAndIpMapping", dnsNameAddress.getAddress(), ipAddress.getAddress()));
+		}
+		try {
+			addHostNameAndIpMapping(dnsNameAddress, ipAddress, time, source, caseDbTransaction.getConnection());
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error adding host DNS address mapping for DNS name = %s, and IP address = %s", dnsNameAddress.getAddress(), ipAddress.getAddress()), ex);
+		}
+	}
+
+	/**
+	 * Adds a row to the host address dns ip map table.
+	 *
+	 * @param dnsNameAddress The DNS name.
+	 * @param ipAddress      An IP address associated with the DNS name.
+	 * @param time           Timestamp when this relationship was true.
+	 * @param source         The source.
+	 * @param connection     The db connection. Null is not permitted.
+	 *
+	 * @throws TskCoreException
+	 */
+	private void addHostNameAndIpMapping(HostAddress dnsNameAddress, HostAddress ipAddress, Long time, Content source, final CaseDbConnection connection) throws SQLException, TskCoreException {
+
+		if (dnsNameAddress.getAddressType() != HostAddress.HostAddressType.HOSTNAME) {
+			throw new TskCoreException("Illegal arguments passed to addHostNameAndIpMapping: a host name address is expected.");
+		}
+		if ((ipAddress.getAddressType() != HostAddress.HostAddressType.IPV4) && (ipAddress.getAddressType() != HostAddress.HostAddressType.IPV6)) {
+			throw new TskCoreException("Illegal arguments passed to addHostNameAndIpMapping: an IPv4/IPv6 address is expected.");
+		}
+		if (Objects.isNull(connection)) {
+			throw new TskCoreException("Illegal arguments passed to addHostNameAndIpMapping: null connection.");
+		}
+
+		String insertSQL = db.getInsertOrIgnoreSQL(" INTO tsk_host_address_dns_ip_map(dns_address_id, ip_address_id, source_obj_id, time) "
+				+ " VALUES(?, ?, ?, ?) ");
+
+		PreparedStatement preparedStatement = connection.getPreparedStatement(insertSQL, Statement.NO_GENERATED_KEYS);
+
+		preparedStatement.clearParameters();
+		preparedStatement.setLong(1, dnsNameAddress.getId());
+		preparedStatement.setLong(2, ipAddress.getId());
+		preparedStatement.setLong(3, source.getId());
+		if (time != null) {
+			preparedStatement.setLong(4, time);
+		} else {
+			preparedStatement.setNull(4, java.sql.Types.BIGINT);
+		}
+		connection.executeUpdate(preparedStatement);
+		recentHostNameAndIpMappingCache.put(ipAddress.getId(), Byte.valueOf((byte) 1));
+		recentHostNameAndIpMappingCache.put(dnsNameAddress.getId(), Byte.valueOf((byte) 1));
+	}
+
+	/**
+	 * Returns true if the given address object id is used as either an IP
+	 * address or a host name in a host name / IP mapping.
+	 * <br>
+	 * <b>Note:</b> This API call uses a database connection. Do not invoke
+	 * within a transaction.
+	 *
+	 * @param addressObjectId Object id of the host address to check.
+	 *
+	 * @return True if a mapping exists for the address, false otherwise.
+	 *
+	 * @throws TskCoreException
+	 */
+	public boolean hostNameAndIpMappingExists(long addressObjectId) throws TskCoreException {
+
+		Byte isPresent = recentHostNameAndIpMappingCache.getIfPresent(addressObjectId);
+
+		if (Objects.nonNull(isPresent)) {
+			return true;
+		}
+
+		String queryString = "SELECT count(*) as mappingCount FROM tsk_host_address_dns_ip_map WHERE ip_address_id = ? OR dns_address_id = ? ";
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				PreparedStatement ps = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);) {
+			ps.clearParameters();
+			ps.setLong(1, addressObjectId);
+			ps.setLong(2, addressObjectId);
+			try (ResultSet rs = ps.executeQuery()) {
+				if (!rs.next()) {
+					return false;
+				} else {
+					boolean status = rs.getLong("mappingCount") > 0;
+					if (status) {
+						recentHostNameAndIpMappingCache.put(addressObjectId, Byte.valueOf((byte) 1));
+					}
+					return status;
+				}
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error looking up host address / Ip mapping for address = " + addressObjectId, ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Returns the object id of the HostAddress with the given type and
+	 * address, if one exists.
+	 * <br>
+	 * <b>Note:</b> This API call uses a database connection. Do not invoke
+	 * within a transaction.
+	 *
+	 * @param type    Address type.
+	 * @param address Address.
+	 *
+	 * @return Optional with the object id of the matching address, or
+	 *         Optional.empty if no match is found.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Optional<Long> hostAddressExists(HostAddress.HostAddressType type, String address) throws TskCoreException {
+
+		HostAddress hostAddress = recentHostAddressCache.getIfPresent(createRecentHostAddressKey(type, address));
+		if (Objects.nonNull(hostAddress)) {
+			return Optional.of(hostAddress.getId());
+		}
+
+		HostAddress.HostAddressType addressType = type;
+		if (type.equals(HostAddress.HostAddressType.DNS_AUTO)) {
+			addressType = getDNSType(address);
+		} 
+		
+		String queryString = "SELECT id, address_type, address FROM tsk_host_addresses"
+				+ " WHERE address = ?  AND address_type = ?"; 
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				PreparedStatement query = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);) {
+			query.clearParameters();
+			query.setString(1, address.toLowerCase());
+			query.setInt(2, addressType.getId());
+			try (ResultSet rs = query.executeQuery()) {
+				if (!rs.next()) {
+					return Optional.empty();	// no match found
+				} else {
+					long objId = rs.getLong("id");
+					int addrType = rs.getInt("address_type");
+					String addr = rs.getString("address");
+					HostAddress hostAddr = new HostAddress(db, objId, HostAddress.HostAddressType.fromID(addrType), addr);
+					recentHostAddressCache.put(createRecentHostAddressKey(addrType, address), hostAddr);					
+					return Optional.of(objId);
+				}
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host address with type = %s and address = %s", type.getName(), address), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets the IP addresses mapped to a given host name.
+	 *
+	 * @param hostname Host name to look for.
+	 *
+	 * @return List of IP addresses mapped to this host name. May be empty.
+	 *
+	 * @throws TskCoreException
+	 */
+	public List<HostAddress> getIpAddress(String hostname) throws TskCoreException {
+		String queryString = "SELECT ip_address_id FROM tsk_host_address_dns_ip_map as map "
+				+ " JOIN tsk_host_addresses as addresses "
+				+ " ON map.dns_address_id = addresses.id "
+				+ " WHERE addresses.address_type = " + HostAddress.HostAddressType.HOSTNAME.getId()
+				+ " AND addresses.address = ?";
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			List<HostAddress> ipAddresses = new ArrayList<>();
+			PreparedStatement query = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);
+			query.clearParameters();
+			query.setString(1, hostname.toLowerCase());
+			try (ResultSet rs = query.executeQuery()) {
+				while (rs.next()) {
+					long ipAddressObjId = rs.getLong("ip_address_id");
+					ipAddresses.add(HostAddressManager.this.getHostAddress(ipAddressObjId, connection));
+					recentHostNameAndIpMappingCache.put(ipAddressObjId, Byte.valueOf((byte) 1));
+				}
+				return ipAddresses;
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host addresses for host name: %s", hostname), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets the host names for a given IP address.
+	 *
+	 * @param ipAddress IP address to look for.
+	 *
+	 * @return All corresponding host names.
+	 *
+	 * @throws TskCoreException
+	 */
+	List<HostAddress> getHostNameByIp(String ipAddress) throws TskCoreException {
+		String queryString = "SELECT dns_address_id FROM tsk_host_address_dns_ip_map as map "
+				+ " JOIN tsk_host_addresses as addresses "
+				+ " ON map.ip_address_id = addresses.id "
+				+ " WHERE ( addresses.address_type = " + HostAddress.HostAddressType.IPV4.getId()
+				+ " OR  addresses.address_type = " + HostAddress.HostAddressType.IPV6.getId() + ")"
+				+ " AND addresses.address = ?";
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			List<HostAddress> dnsNames = new ArrayList<>();
+			PreparedStatement query = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);
+			query.clearParameters();
+			query.setString(1, ipAddress.toLowerCase());
+			try (ResultSet rs = query.executeQuery()) {
+				while (rs.next()) {
+					long dnsAddressId = rs.getLong("dns_address_id");
+					dnsNames.add(HostAddressManager.this.getHostAddress(dnsAddressId, connection));
+					recentHostNameAndIpMappingCache.put(dnsAddressId, Byte.valueOf((byte) 1));
+				}
+				return dnsNames;
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host addresses for IP address: %s", ipAddress), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Associate the given artifact with a HostAddress.
+	 *
+	 * @param content     The content/item using the address.
+	 * @param hostAddress The host address.
+	 */
+	public void addUsage(Content content, HostAddress hostAddress) throws TskCoreException {
+		
+		String key = content.getDataSource().getId() + "#" + hostAddress.getId() + "#" + content.getId();
+		Boolean cachedValue = hostAddressUsageCache.getIfPresent(key);
+		if (null != cachedValue) {
+			return;
+		}
+		
+		final String insertSQL = db.getInsertOrIgnoreSQL(" INTO tsk_host_address_usage(addr_obj_id, obj_id, data_source_obj_id) "
+				+ " VALUES(" + hostAddress.getId() + ", " + content.getId() + ", " + content.getDataSource().getId() + ") ");
+
+		db.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement()) {
+			connection.executeUpdate(s, insertSQL);
+			hostAddressUsageCache.put(key, true);
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error associating host address %s with artifact with id %d", hostAddress.getAddress(), content.getId()), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	private final String ADDRESS_USAGE_QUERY = "SELECT addresses.id as id, addresses.address_type as address_type, addresses.address as address "
+			+ " FROM tsk_host_address_usage as usage "
+			+ " JOIN tsk_host_addresses as addresses "
+			+ " ON usage.addr_obj_id = addresses.id ";
+
+	/**
+	 * Get all the addresses that have been used by the given content.
+	 *
+	 * @param content Content to get addresses used for.
+	 *
+	 * @return List of addresses, may be empty.
+	 *
+	 * @throws TskCoreException
+	 */
+	public List<HostAddress> getHostAddressesUsedByContent(Content content) throws TskCoreException {
+		String queryString = ADDRESS_USAGE_QUERY
+				+ " WHERE usage.obj_id = " + content.getId();
+
+		return getHostAddressesUsed(queryString);
+	}
+
+	/**
+	 * Get all the addresses that have been used by the given data source.
+	 *
+	 * @param dataSource Data source to get addresses used for.
+	 *
+	 * @return List of addresses, may be empty.
+	 *
+	 * @throws TskCoreException
+	 */
+	public List<HostAddress> getHostAddressesUsedOnDataSource(Content dataSource) throws TskCoreException {
+		String queryString = ADDRESS_USAGE_QUERY
+				+ " WHERE usage.data_source_obj_id = " + dataSource.getId();
+
+		return getHostAddressesUsed(queryString);
+	}
+
+	/**
+	 * Gets the host addresses used by running the given query.
+	 *
+	 * @param addressesUsedSQL SQL query to run.
+	 *
+	 * @return List of addresses, may be empty.
+	 *
+	 * @throws TskCoreException
+	 */
+	private List<HostAddress> getHostAddressesUsed(String addressesUsedSQL) throws TskCoreException {
+
+		List<HostAddress> addressesUsed = new ArrayList<>();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, addressesUsedSQL)) {
+
+			while (rs.next()) {
+				addressesUsed.add(new HostAddress(db, rs.getLong("id"), HostAddress.HostAddressType.fromID(rs.getInt("address_type")), rs.getString("address")));
+			}
+			return addressesUsed;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host addresses used with query string = %s", addressesUsedSQL), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Detects format of address.
+	 *
+	 * @param address The address.
+	 *
+	 * @return The detected type.
+	 */
+	private HostAddress.HostAddressType getDNSType(String address) {
+		if (isIPv4(address)) {
+			return HostAddress.HostAddressType.IPV4;
+		} else if (isIPv6(address)) {
+			return HostAddress.HostAddressType.IPV6;
+		} else {
+			return HostAddress.HostAddressType.HOSTNAME;
+		}
+	}
+
+	private static final Pattern IPV4_PATTERN
+			= Pattern.compile("^(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(\\.(?!$)|$)){4}$");
+
+	/**
+	 * Test if an address is IPv4.
+	 *
+	 * @param ipAddress The address.
+	 *
+	 * @return true if it is IPv4 format, false otherwise.
+	 */
+	private static boolean isIPv4(String ipAddress) {
+		if (ipAddress != null) {
+			return IPV4_PATTERN.matcher(ipAddress).matches();
+		}
+		return false;
+	}
+
+
+	private static final Pattern IPV6_STD_PATTERN
+			= Pattern.compile("^(?:[0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}$");
+	private static final Pattern IPV6_HEX_COMPRESSED_PATTERN
+			= Pattern.compile("^((?:[0-9A-Fa-f]{1,4}(?::[0-9A-Fa-f]{1,4})*)?)::((?:[0-9A-Fa-f]{1,4}(?::[0-9A-Fa-f]{1,4})*)?)$");
+
+	private static boolean isIPv6StdAddress(final String input) {
+		return IPV6_STD_PATTERN.matcher(input).matches();
+	}
+
+	private static boolean isIPv6HexCompressedAddress(final String input) {
+		return IPV6_HEX_COMPRESSED_PATTERN.matcher(input).matches();
+	}
+	
+	/**
+	 * Test if an address is IPv6.
+	 *
+	 * @param ipAddress The address.
+	 *
+	 * @return true if it is IPv6 format, false otherwise.
+	 */
+	private static boolean isIPv6(String ipAddress) {
+	
+		if (ipAddress != null) {
+			 return isIPv6StdAddress(ipAddress) || isIPv6HexCompressedAddress(ipAddress);
+		}
+
+		return false;
+	}
+}
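For reviewers: a minimal usage sketch of the HostAddressManager API added above. It is not part of the patch; it assumes the manager is exposed through an accessor such as SleuthkitCase.getHostAddressManager() (not shown in this diff) and that an open case, a Host, and a source Content object already exist. The host name and IP literals are illustrative only.

import java.util.Optional;
import org.sleuthkit.datamodel.*;

class HostAddressUsageSketch {
	// Sketch only: exercises newHostAddress(), assignHostToAddress(),
	// addHostNameAndIpMapping() and addUsage() as defined in this patch.
	static void recordAddresses(SleuthkitCase skCase, Host host, Content source) throws TskCoreException {
		HostAddressManager addrMgr = skCase.getHostAddressManager(); // assumed accessor

		// DNS_AUTO lets the manager detect IPv4 vs. IPv6 vs. host name.
		HostAddress ip = addrMgr.newHostAddress(HostAddress.HostAddressType.DNS_AUTO, "10.0.0.5");
		HostAddress dns = addrMgr.newHostAddress(HostAddress.HostAddressType.HOSTNAME, "workstation.example.com");

		// Record that the host used this IP (time unknown) and where the mapping came from.
		addrMgr.assignHostToAddress(host, ip, null, source);

		// Record the DNS name <-> IP observation and that 'source' used the address.
		addrMgr.addHostNameAndIpMapping(dns, ip, null, source);
		addrMgr.addUsage(source, ip);

		// Later lookups go through the in-memory caches documented above when possible.
		Optional<HostAddress> cached = addrMgr.getHostAddress(HostAddress.HostAddressType.IPV4, "10.0.0.5");
	}
}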
diff --git a/bindings/java/src/org/sleuthkit/datamodel/HostManager.java b/bindings/java/src/org/sleuthkit/datamodel/HostManager.java
new file mode 100755
index 0000000000000000000000000000000000000000..d8255b2e7823a93e715bb3264b3f2794ac95f6ab
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/HostManager.java
@@ -0,0 +1,588 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import com.google.common.base.Strings;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.Savepoint;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.UUID;
+import org.sleuthkit.datamodel.Host.HostDbStatus;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
+import org.sleuthkit.datamodel.TskEvent.HostsUpdatedTskEvent;
+import org.sleuthkit.datamodel.TskEvent.HostsDeletedTskEvent;
+
+/**
+ * Responsible for creating/updating/retrieving Hosts.
+ */
+public final class HostManager {
+
+	private final SleuthkitCase db;
+
+	/**
+	 * Construct a HostManager for the given SleuthkitCase.
+	 *
+	 * @param skCase The SleuthkitCase
+	 *
+	 */
+	HostManager(SleuthkitCase skCase) {
+		this.db = skCase;
+	}
+
+	/**
+	 * Create a host with specified name. If a host already exists with the
+	 * given name, it returns the existing host.
+	 *
+	 * @param name	Host name.
+	 *
+	 * @return Host with the specified name.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Host newHost(String name) throws TskCoreException {
+		CaseDbTransaction transaction = db.beginTransaction();
+		try {
+			Host host = newHost(name, transaction);
+			transaction.commit();
+			transaction = null;
+			return host;
+		} finally {
+			if (transaction != null) {
+				transaction.rollback();
+			}
+		}
+	}
+
+	/**
+	 * Create a host with given name. If the host already exists, the existing
+	 * host will be returned.
+	 *
+	 * NOTE: Whenever possible, create hosts in a single-step transaction so
+	 * that this method can quickly determine whether a host of the same name
+	 * already exists. If you call this as part of a multi-step
+	 * CaseDbTransaction, this method may think it can insert the host name,
+	 * but when CaseDbTransaction.commit() is called there could be a
+	 * uniqueness constraint violation and other inserts in the same
+	 * transaction could have problems.
+	 *
+	 * This method should never be made public and exists only because we need
+	 * to support APIs that do not take in a host and so must create one. If
+	 * you call this method, ensure that the host name you supply is unique.
+	 *
+	 * @param name  Host name, which must be unique if this is called as part
+	 *              of a multi-step transaction.
+	 * @param trans Database transaction to use.
+	 *
+	 * @return Newly created host.
+	 *
+	 * @throws TskCoreException
+	 */
+	Host newHost(String name, CaseDbTransaction trans) throws TskCoreException {
+		// must have a name
+		if (Strings.isNullOrEmpty(name)) {
+			throw new TskCoreException("Illegal argument passed to createHost: Host name is required.");
+		}
+
+		CaseDbConnection connection = trans.getConnection();
+		Savepoint savepoint = null;
+
+		try {
+			savepoint = connection.getConnection().setSavepoint();
+			String hostInsertSQL = "INSERT INTO tsk_hosts(name) VALUES (?)"; // NON-NLS
+			PreparedStatement preparedStatement = connection.getPreparedStatement(hostInsertSQL, Statement.RETURN_GENERATED_KEYS);
+
+			preparedStatement.clearParameters();
+			preparedStatement.setString(1, name);
+
+			connection.executeUpdate(preparedStatement);
+
+			// Read back the row id
+			Host host = null;
+			try (ResultSet resultSet = preparedStatement.getGeneratedKeys();) {
+				if (resultSet.next()) {
+					host = new Host(resultSet.getLong(1), name); //last_insert_rowid()
+				} else {
+					throw new SQLException("Error executing " + hostInsertSQL);
+				}
+			}
+
+			if (host != null) {
+				trans.registerAddedHost(host);
+			}
+			return host;
+		} catch (SQLException ex) {
+			if (savepoint != null) {
+				try {
+					connection.getConnection().rollback(savepoint);
+				} catch (SQLException ex2) {
+					throw new TskCoreException(String.format("Error adding host with name = %s and unable to rollback", name), ex);
+				}
+			}
+
+			// It may be the case that the host already exists, so try to get it.
+			Optional<Host> optHost = getHostByName(name, connection);
+			if (optHost.isPresent()) {
+				return optHost.get();
+			}
+			throw new TskCoreException(String.format("Error adding host with name = %s", name), ex);
+		}
+	}
+
+	/**
+	 * Updates the name of the provided host.
+	 *
+	 * @param host    The host to be updated.
+	 * @param newName The new name of the host.
+	 *
+	 * @return The updated host.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Host updateHostName(Host host, String newName) throws TskCoreException {
+		if (host == null) {
+			throw new TskCoreException("Illegal argument passed to updateHostName: No host argument provided.");
+		} else if (newName == null) {
+			throw new TskCoreException(String.format("Illegal argument passed to updateHostName: No new name provided for host with id %d", host.getHostId()));
+		}
+
+		long hostId = host.getHostId();
+		Host updatedHost = null;
+		db.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = db.getConnection()) {
+			// Don't update the name for non-active hosts
+			String hostUpdateSQL = "UPDATE tsk_hosts "
+					+ "SET name = "
+					+ "   CASE WHEN db_status = " + Host.HostDbStatus.ACTIVE.getId() + " THEN ? ELSE name END "
+					+ "WHERE id = ?";
+
+			PreparedStatement preparedStatement = connection.getPreparedStatement(hostUpdateSQL, Statement.RETURN_GENERATED_KEYS);
+
+			preparedStatement.clearParameters();
+			preparedStatement.setString(1, newName);
+			preparedStatement.setLong(2, hostId);
+
+			connection.executeUpdate(preparedStatement);
+
+			updatedHost = getHostById(hostId, connection).orElseThrow(()
+					-> new TskCoreException(String.format("Error while fetching newly updated host with id: %d", hostId)));
+
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error updating host with name = %s", newName), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+
+		if (updatedHost != null) {
+			fireChangeEvent(updatedHost);
+		}
+		return updatedHost;
+	}
+
+	/**
+	 * Delete a host. Name comparison is case-insensitive.
+	 *
+	 * @param name Name of the host to delete.
+	 *
+	 * @return The id of the deleted host or null if no host was deleted.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Long deleteHost(String name) throws TskCoreException {
+		if (name == null) {
+			throw new TskCoreException("Illegal argument passed to deleteHost: Name provided must be non-null");
+		}
+
+		// query to check if there are any dependencies on this host.  If so, don't delete.
+		String queryString = "SELECT COUNT(*) AS count FROM\n"
+				+ "(SELECT obj_id AS id, host_id FROM data_source_info\n"
+				+ "UNION\n"
+				+ "SELECT id, scope_host_id AS host_id FROM tsk_os_account_realms\n"
+				+ "UNION\n"
+				+ "SELECT id, host_id FROM tsk_os_account_attributes\n"
+				+ "UNION\n"
+				+ "SELECT id, host_id FROM tsk_host_address_map) children\n"
+				+ "INNER JOIN tsk_hosts h ON children.host_id = h.id WHERE LOWER(h.name)=LOWER(?)";
+
+		String deleteString = "DELETE FROM tsk_hosts WHERE LOWER(name) = LOWER(?)";
+
+		CaseDbTransaction trans = this.db.beginTransaction();
+		try {
+			// check if host has any child data sources.  if so, don't delete and throw exception.
+			PreparedStatement query = trans.getConnection().getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);
+			query.clearParameters();
+			query.setString(1, name);
+			try (ResultSet queryResults = query.executeQuery()) {
+				if (queryResults.next() && queryResults.getLong("count") > 0) {
+					throw new TskCoreException(String.format("Host with name '%s' has child data and cannot be deleted.", name));
+				}
+			}
+
+			// otherwise, delete the host
+			PreparedStatement update = trans.getConnection().getPreparedStatement(deleteString, Statement.RETURN_GENERATED_KEYS);
+			update.clearParameters();
+			update.setString(1, name);
+			int numUpdated = update.executeUpdate();
+
+			// get ids for deleted.
+			Long hostId = null;
+
+			if (numUpdated > 0) {
+				try (ResultSet updateResult = update.getGeneratedKeys()) {
+					if (updateResult.next()) {
+						hostId = updateResult.getLong(1);
+					}
+				}
+			}
+
+			trans.commit();
+			trans = null;
+
+			fireDeletedEvent(new Host(hostId, name));
+			return hostId;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error deleting host with name %s", name), ex);
+		} finally {
+			if (trans != null) {
+				trans.rollback();
+			}
+		}
+	}
+
+	/**
+	 * Get all data sources associated with a given host.
+	 *
+	 * @param host The host.
+	 *
+	 * @return The list of data sources corresponding to the host.
+	 *
+	 * @throws TskCoreException
+	 */
+	public List<DataSource> getDataSourcesForHost(Host host) throws TskCoreException {
+		String queryString = "SELECT * FROM data_source_info WHERE host_id = " + host.getHostId();
+
+		List<DataSource> dataSources = new ArrayList<>();
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			while (rs.next()) {
+				dataSources.add(db.getDataSource(rs.getLong("obj_id")));
+			}
+
+			return dataSources;
+		} catch (SQLException | TskDataException ex) {
+			throw new TskCoreException(String.format("Error getting data sources for host %s", host.getName()), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get active host with given name.
+	 *
+	 * @param name Host name to look for.
+	 *
+	 * @return Optional with host. Optional.empty if no matching host is found.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Optional<Host> getHostByName(String name) throws TskCoreException {
+		try (CaseDbConnection connection = db.getConnection()) {
+			return getHostByName(name, connection);
+		}
+	}
+
+	/**
+	 * Get active host with given name.
+	 *
+	 * @param name       Host name to look for.
+	 * @param connection Database connection to use.
+	 *
+	 * @return Optional with host. Optional.empty if no matching host is found.
+	 *
+	 * @throws TskCoreException
+	 */
+	private Optional<Host> getHostByName(String name, CaseDbConnection connection) throws TskCoreException {
+
+		String queryString = "SELECT * FROM tsk_hosts"
+				+ " WHERE LOWER(name) = LOWER(?)"
+				+ " AND db_status = " + Host.HostDbStatus.ACTIVE.getId();
+
+		db.acquireSingleUserCaseReadLock();
+		try {
+			PreparedStatement s = connection.getPreparedStatement(queryString, Statement.RETURN_GENERATED_KEYS);
+			s.clearParameters();
+			s.setString(1, name);
+
+			try (ResultSet rs = s.executeQuery()) {
+				if (!rs.next()) {
+					return Optional.empty();	// no match found
+				} else {
+					return Optional.of(new Host(rs.getLong("id"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status"))));
+				}
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host with name = %s", name), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get host with the given id.
+	 *
+	 * @param id The id of the host.
+	 *
+	 * @return Optional with host. Optional.empty if no matching host is found.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Optional<Host> getHostById(long id) throws TskCoreException {
+		try (CaseDbConnection connection = db.getConnection()) {
+			return getHostById(id, connection);
+		}
+	}
+
+	/**
+	 * Get host with given id.
+	 *
+	 * @param id	        The id of the host.
+	 * @param connection Database connection to use.
+	 *
+	 * @return Optional with host. Optional.empty if no matching host is found.
+	 *
+	 * @throws TskCoreException
+	 */
+	private Optional<Host> getHostById(long id, CaseDbConnection connection) throws TskCoreException {
+
+		String queryString = "SELECT * FROM tsk_hosts WHERE id = " + id;
+
+		db.acquireSingleUserCaseReadLock();
+		try (Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (rs.next()) {
+				return Optional.of(new Host(rs.getLong("id"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status"))));
+			} else {
+				return Optional.empty();
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host with id: %d", id), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get all hosts that have a status of ACTIVE.
+	 *
+	 * @return Collection of hosts that have ACTIVE status.
+	 *
+	 * @throws TskCoreException
+	 */
+	public List<Host> getAllHosts() throws TskCoreException {
+		String queryString = "SELECT * FROM tsk_hosts WHERE db_status = " + HostDbStatus.ACTIVE.getId();
+
+		List<Host> hosts = new ArrayList<>();
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			while (rs.next()) {
+				hosts.add(new Host(rs.getLong("id"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status"))));
+			}
+
+			return hosts;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting hosts", ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get host for the given data source.
+	 *
+	 * @param dataSource The data source to look up the host for.
+	 *
+	 * @return The host for this data source (will not be null).
+	 *
+	 * @throws TskCoreException if no host is found or an error occurs.
+	 */
+	public Host getHostByDataSource(DataSource dataSource) throws TskCoreException {
+		return getHostByDataSource(dataSource.getId());
+	}	
+	
+	/**
+	 * Get host for the given data source ID.
+	 *
+	 * @param dataSourceId The data source ID to look up the host for.
+	 *
+	 * @return The host for this data source (will not be null).
+	 *
+	 * @throws TskCoreException if no host is found or an error occurs.
+	 */	
+	Host getHostByDataSource(long dataSourceId) throws TskCoreException {
+		String queryString = "SELECT tsk_hosts.id AS hostId, tsk_hosts.name AS name, tsk_hosts.db_status AS db_status FROM \n"
+				+ "tsk_hosts INNER JOIN data_source_info \n"
+				+ "ON tsk_hosts.id = data_source_info.host_id \n"
+				+ "WHERE data_source_info.obj_id = " + dataSourceId;
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (!rs.next()) {
+				throw new TskCoreException(String.format("Host not found for data source with ID = %d", dataSourceId));
+			} else {
+				return new Host(rs.getLong("hostId"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status")));
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting host for data source with ID = %d", dataSourceId), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Merge source host into destination host. When complete:
+	 * - All realms will have been moved into the destination host or merged
+	 *   with existing realms in the destination host.
+	 * - All references to the source host will be updated to reference the
+	 *   destination host.
+	 * - The source host will be updated so that it will no longer be returned
+	 *   by any methods apart from get by host id.
+	 *
+	 * @param sourceHost The source host.
+	 * @param destHost   The destination host.
+	 *
+	 * @throws TskCoreException
+	 */
+	public void mergeHosts(Host sourceHost, Host destHost) throws TskCoreException {
+		String query = "";
+		CaseDbTransaction trans = null;
+		try {
+			trans = db.beginTransaction();
+
+			// Merge or move any realms associated with the source host
+			List<OsAccountRealm> realms = db.getOsAccountRealmManager().getRealmsByHost(sourceHost, trans.getConnection());
+			for (OsAccountRealm realm : realms) {
+				db.getOsAccountRealmManager().moveOrMergeRealm(realm, destHost, trans);
+			}
+
+			try (Statement s = trans.getConnection().createStatement()) {
+				// Update references to the source host
+
+				// tsk_host_address_map has a unique constraint on host_id, addr_obj_id, time,
+				// so delete any rows that would be duplicates.
+				query = "DELETE FROM tsk_host_address_map "
+						+ "WHERE id IN ( "
+						+ "SELECT "
+						+ "  sourceMapRow.id "
+						+ "FROM "
+						+ "  tsk_host_address_map destMapRow "
+						+ "INNER JOIN tsk_host_address_map sourceMapRow ON destMapRow.addr_obj_id = sourceMapRow.addr_obj_id AND destMapRow.time = sourceMapRow.time "
+						+ "WHERE destMapRow.host_id = " + destHost.getHostId()
+						+ " AND sourceMapRow.host_id = " + sourceHost.getHostId() + " )";
+				s.executeUpdate(query);
+				query = makeOsAccountUpdateQuery("tsk_host_address_map", "host_id", sourceHost, destHost);
+				s.executeUpdate(query);
+
+				query = makeOsAccountUpdateQuery("tsk_os_account_attributes", "host_id", sourceHost, destHost);
+				s.executeUpdate(query);
+
+				query = makeOsAccountUpdateQuery("data_source_info", "host_id", sourceHost, destHost);
+				s.executeUpdate(query);
+
+				// Mark the source host as merged and change the name to a random string.
+				String mergedName = makeMergedHostName();
+				query = "UPDATE tsk_hosts SET merged_into = " + destHost.getHostId()
+						+ ", db_status = " + Host.HostDbStatus.MERGED.getId()
+						+ ", name = '" + mergedName + "' "
+						+ " WHERE id = " + sourceHost.getHostId();
+				s.executeUpdate(query);
+			}
+
+			trans.commit();
+			trans = null;
+
+			// Fire events for updated and deleted hosts
+			fireChangeEvent(sourceHost);
+			fireDeletedEvent(destHost);
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error executing query: " + query, ex);
+		} finally {
+			if (trans != null) {
+				trans.rollback();
+			}
+		}
+	}
+
+	/**
+	 * Create the query to update the host id column to the merged host.
+	 *
+	 * @param tableName  Name of table to update.
+	 * @param columnName Name of the column containing the host id.
+	 * @param sourceHost The source host.
+	 * @param destHost   The destination host.
+	 *
+	 * @return The query.
+	 */
+	private String makeOsAccountUpdateQuery(String tableName, String columnName, Host sourceHost, Host destHost) {
+		return "UPDATE " + tableName + " SET " + columnName + " = " + destHost.getHostId() + " WHERE " + columnName + " = " + sourceHost.getHostId();
+	}
+
+	/**
+	 * Create a random name for hosts that have been merged.
+	 *
+	 * @return The random signature.
+	 */
+	private String makeMergedHostName() {
+		return "MERGED " + UUID.randomUUID().toString();
+	}
+
+	/**
+	 * Fires an event that a host has changed. Do not call this with an open
+	 * transaction.
+	 *
+	 * @param newValue The new value for the host.
+	 */
+	private void fireChangeEvent(Host newValue) {
+		db.fireTSKEvent(new HostsUpdatedTskEvent(Collections.singletonList(newValue)));
+	}
+
+	/**
+	 * Fires an event that a host has been deleted. Do not call this with an
+	 * open transaction.
+	 *
+	 * @param deleted The deleted host.
+	 */
+	private void fireDeletedEvent(Host deleted) {
+		db.fireTSKEvent(new HostsDeletedTskEvent(Collections.singletonList(deleted.getHostId())));
+	}
+}
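Likewise, a hedged sketch of how the new HostManager might be driven. SleuthkitCase.getHostManager() is used elsewhere in this patch (Image.getHost()); the host names below are hypothetical.

import java.util.List;
import java.util.Optional;
import org.sleuthkit.datamodel.*;

class HostManagementSketch {
	// Sketch only; assumes an open SleuthkitCase.
	static void manageHosts(SleuthkitCase skCase) throws TskCoreException {
		HostManager hostMgr = skCase.getHostManager();

		// Creates the host, or returns the existing ACTIVE host with that name.
		Host laptop = hostMgr.newHost("LAPTOP-01");

		// Case-insensitive lookup of an ACTIVE host.
		Optional<Host> found = hostMgr.getHostByName("laptop-01");

		// Rename (only applied while the host is ACTIVE) and list its data sources.
		Host renamed = hostMgr.updateHostName(laptop, "LAPTOP-01-REIMAGED");
		List<DataSource> sources = hostMgr.getDataSourcesForHost(renamed);

		// Fold a duplicate host into this one; the source host is marked MERGED,
		// renamed, and references are repointed to the destination host.
		Optional<Host> duplicate = hostMgr.getHostByName("laptop01-old");
		if (duplicate.isPresent()) {
			hostMgr.mergeHosts(duplicate.get(), renamed);
		}
	}
}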
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Image.java b/bindings/java/src/org/sleuthkit/datamodel/Image.java
index a465af5975966e2f9cc12870732f1af7f1a3328c..7e2d793a29e1749b0d4b4aa192334d73fa4def6b 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Image.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Image.java
@@ -43,6 +43,7 @@ public class Image extends AbstractContent implements DataSource {
 	private long size;
 	private final String[] paths;
 	private volatile long imageHandle = 0;
+	private volatile Host host = null;
 	private final String deviceId, timezone;
 	private String md5, sha1, sha256;
 	private static ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
@@ -514,10 +515,10 @@ public long getContentSize(SleuthkitCase sleuthkitCase) throws TskCoreException
 
 		return contentSize;
 	}
-	
+
 	/**
 	 * Sets the acquisition details field in the case database.
-	 * 
+	 *
 	 * @param details The acquisition details
 	 * 
 	 * @throws TskCoreException Thrown if the data can not be written
@@ -526,7 +527,66 @@ public long getContentSize(SleuthkitCase sleuthkitCase) throws TskCoreException
 	public void setAcquisitionDetails(String details) throws TskCoreException {
 		getSleuthkitCase().setAcquisitionDetails(this, details);
 	}
-	
+
+	/**
+	 * Sets the acquisition tool details such as its name, version number and
+	 * any settings used during the acquisition to acquire data.
+	 *
+	 * @param name     The name of the acquisition tool. May be NULL.
+	 * @param version  The acquisition tool version number. May be NULL.
+	 * @param settings The settings used by the acquisition tool. May be NULL.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be written
+	 */
+	@Override
+	public void setAcquisitionToolDetails(String name, String version, String settings) throws TskCoreException {
+		getSleuthkitCase().setAcquisitionToolDetails(this, name, version, settings);
+	}
+
+	/**
+	 * Gets the acquisition tool settings field from the case database.
+	 *
+	 * @return The acquisition tool settings. May be Null if not set.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	public String getAcquisitionToolSettings() throws TskCoreException {
+		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_settings");
+	}
+
+	/**
+	 * Gets the acquisition tool name field from the case database.
+	 *
+	 * @return The acquisition tool name. May be Null if not set.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	public String getAcquisitionToolName() throws TskCoreException {
+		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_name");
+	}
+
+	/**
+	 * Gets the acquisition tool version field from the case database.
+	 *
+	 * @return The acquisition tool version. May be Null if not set.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	public String getAcquisitionToolVersion() throws TskCoreException {
+		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_version");
+	}
+
+	/**
+	 * Gets the added date field from the case database.
+	 *
+	 * @return The date time when the image was added in epoch seconds.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	public Long getDateAdded() throws TskCoreException {
+		return getSleuthkitCase().getDataSourceInfoLong(this, "added_date_time");
+	}
+
 	/**
 	 * Gets the acquisition details field from the case database.
 	 * 
@@ -538,6 +598,40 @@ public void setAcquisitionDetails(String details) throws TskCoreException {
 	public String getAcquisitionDetails() throws TskCoreException {
 		return getSleuthkitCase().getAcquisitionDetails(this);
 	}	
+	
+	/**
+	 * Gets the host for this data source.
+	 * 
+	 * @return The host
+	 * 
+	 * @throws TskCoreException 
+	 */
+	@Override
+	public Host getHost() throws TskCoreException {
+		// This is a check-then-act race condition that may occasionally result
+		// in additional processing but is safer than using locks.
+		if (host == null) {
+			host = getSleuthkitCase().getHostManager().getHostByDataSource(this);
+		}
+		return host;
+	}	
+
+	/**
+	 * Updates the image's total size and sector size. This function may be used
+	 * to update the sizes after the image was created.
+	 *
+	 * Can only update the sizes if they were not set before. Will throw
+	 * TskCoreException if the values in the db are not 0 prior to this call.
+	 *
+	 * @param totalSize  The total size
+	 * @param sectorSize The sector size
+	 *
+	 * @throws TskCoreException If there is an error updating the case database.
+	 *
+	 */
+	public void setSizes(long totalSize, long sectorSize) throws TskCoreException {
+		getSleuthkitCase().setImageSizes(this, totalSize, sectorSize);
+	}
 
 	/**
 	 * Close a ResultSet.
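A brief, non-normative sketch of the new Image data-source metadata accessors added above; the tool name/version/settings strings, the size values, and the way the Image instance is obtained are placeholders.

import org.sleuthkit.datamodel.*;

class ImageMetadataSketch {
	// Sketch only; 'image' would come from the case (e.g. an added image data source).
	static void describe(Image image) throws TskCoreException {
		// Record which tool produced the image, then read metadata back.
		image.setAcquisitionToolDetails("example-imager", "1.2.3", "compression=fast"); // hypothetical values
		String toolName = image.getAcquisitionToolName();       // may be null if never set
		String toolVersion = image.getAcquisitionToolVersion(); // may be null if never set
		Long addedEpochSeconds = image.getDateAdded();
		Host imageHost = image.getHost();                       // lazily fetched, then cached in the field

		// Sizes may be filled in later, but only while the stored values are still 0.
		image.setSizes(64L * 1024 * 1024 * 1024, 512);
	}
}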
diff --git a/bindings/java/src/org/sleuthkit/datamodel/LayoutFile.java b/bindings/java/src/org/sleuthkit/datamodel/LayoutFile.java
index ab98c64e75a8ffc50219df350fbe2c63b2a539f6..dea60c1329b396f7e1e817c3e84f387e6c1802ed 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/LayoutFile.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/LayoutFile.java
@@ -18,6 +18,7 @@
  */
 package org.sleuthkit.datamodel;
 
+import java.util.Collections;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.sleuthkit.datamodel.TskData.FileKnown;
@@ -81,6 +82,9 @@ public class LayoutFile extends AbstractFile {
 	 * @param parentPath         The path of the parent of the file.
 	 * @param mimeType           The MIME type of the file, null if it has not
 	 *                           yet been determined.
+	 * @param ownerUid			 UID of the file owner as found in the file
+	 *                           system, can be null.
+	 * @param osAccountObjId	 Obj id of the owner OS account, may be null.
 	 */
 	LayoutFile(SleuthkitCase db,
 			long objId,
@@ -92,8 +96,11 @@ public class LayoutFile extends AbstractFile {
 			long size,
 			long ctime, long crtime, long atime, long mtime,
 			String md5Hash, String sha256Hash, FileKnown knownState,
-			String parentPath, String mimeType) {
-		super(db, objId, dataSourceObjectId, TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, name, fileType, 0L, 0, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, SleuthkitCase.extractExtension(name));
+			String parentPath, String mimeType,
+			String ownerUid,
+			Long osAccountObjId) {
+			
+		super(db, objId, dataSourceObjectId, TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, name, fileType, 0L, 0, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, SleuthkitCase.extractExtension(name), ownerUid, osAccountObjId, Collections.emptyList());
 	}
 
 	/**
@@ -280,6 +287,6 @@ protected LayoutFile(SleuthkitCase db, long objId, String name,
 			TSK_FS_NAME_TYPE_ENUM dirType, TSK_FS_META_TYPE_ENUM metaType,
 			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags,
 			long size, String md5Hash, FileKnown knownState, String parentPath) {
-		this(db, objId, db.getDataSourceObjectId(objId), name, fileType, dirType, metaType, dirFlag, metaFlags, size, 0L, 0L, 0L, 0L, md5Hash, null, knownState, parentPath, null);
+		this(db, objId, db.getDataSourceObjectId(objId), name, fileType, dirType, metaType, dirFlag, metaFlags, size, 0L, 0L, 0L, 0L, md5Hash, null, knownState, parentPath, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT);
 	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/LocalFile.java b/bindings/java/src/org/sleuthkit/datamodel/LocalFile.java
index 99abf74e70cd7f77cbaa062e0fa1316072064611..1b749b080ce91cc5624194e6920abdc4f5f2362e 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/LocalFile.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/LocalFile.java
@@ -74,6 +74,9 @@ public class LocalFile extends AbstractFile {
 	 * @param encodingType		     The encoding type of the file.
 	 * @param extension          The extension part of the file name (not
 	 *                           including the '.'), can be null.
+	 * @param ownerUid			 String UID of the user as found in the file
+	 *                           system, can be null.
+	 * @param osAccountObjId	 Obj id of the owner OS account, may be null.
 	 */
 	LocalFile(SleuthkitCase db,
 			long objId,
@@ -88,10 +91,12 @@ public class LocalFile extends AbstractFile {
 			long dataSourceObjectId,
 			String localPath,
 			TskData.EncodingType encodingType,
-			String extension) {
+			String extension,
+			String ownerUid,
+			Long osAccountObjId) {
 		super(db, objId, dataSourceObjectId, TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0,
 				name, fileType, 0L, 0, dirType, metaType, dirFlag,
-				metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension);
+				metaFlags, size, ctime, crtime, atime, mtime, (short) 0, 0, 0, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, Collections.emptyList());
 		// TODO (AUT-1904): The parent id should be passed to AbstractContent 
 		// through the class hierarchy contructors, using 
 		// AbstractContent.UNKNOWN_ID as needed.
@@ -222,7 +227,7 @@ protected LocalFile(SleuthkitCase db,
 				AbstractContent.UNKNOWN_ID, parentPath,
 				db.getDataSourceObjectId(objId),
 				localPath,
-				TskData.EncodingType.NONE, null);
+				TskData.EncodingType.NONE, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT);
 	}
 
 	/**
diff --git a/bindings/java/src/org/sleuthkit/datamodel/LocalFilesDataSource.java b/bindings/java/src/org/sleuthkit/datamodel/LocalFilesDataSource.java
index 0572291890859ea7ecaa7a61236cde43b1c102e5..ae5652c88eb550555c887a69db33822a162f9b3a 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/LocalFilesDataSource.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/LocalFilesDataSource.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2011-2017 Basis Technology Corp.
+ * Copyright 2011-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -39,6 +39,7 @@ public class LocalFilesDataSource extends VirtualDirectory implements DataSource
 	private final long objectId;
 	private final String deviceId;
 	private final String timezone;
+	private volatile Host host;
 
 	private static final Logger LOGGER = Logger.getLogger(LocalFilesDataSource.class.getName());
 
@@ -180,19 +181,34 @@ static long getContentSize(SleuthkitCase sleuthkitCase, long dataSourceObjId) th
 
 		return contentSize;
 	}
-	
+
 	/**
 	 * Sets the acquisition details field in the case database.
-	 * 
+	 *
 	 * @param details The acquisition details
-	 * 
+	 *
 	 * @throws TskCoreException Thrown if the data can not be written
 	 */
 	@Override
 	public void setAcquisitionDetails(String details) throws TskCoreException {
 		getSleuthkitCase().setAcquisitionDetails(this, details);
 	}
-	
+
+	/**
+	 * Sets the acquisition tool details such as its name, version number and
+	 * any settings used during the acquisition to acquire data.
+	 *
+	 * @param name     The name of the acquisition tool. May be NULL.
+	 * @param version  The acquisition tool version number. May be NULL.
+	 * @param settings The settings used by the acquisition tool. May be NULL.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be written
+	 */
+	@Override
+	public void setAcquisitionToolDetails(String name, String version, String settings) throws TskCoreException {
+		getSleuthkitCase().setAcquisitionToolDetails(this, name, version, settings);
+	}
+  
 	/**
 	 * Gets the acquisition details field from the case database.
 	 * 
@@ -205,6 +221,69 @@ public String getAcquisitionDetails() throws TskCoreException {
 		return getSleuthkitCase().getAcquisitionDetails(this);
 	}
 
+
+	/**
+	 * Gets the acquisition tool settings field from the case database.
+	 *
+	 * @return The acquisition tool settings. May be Null if not set.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	@Override
+	public String getAcquisitionToolSettings() throws TskCoreException {
+		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_settings");
+	}
+
+	/**
+	 * Gets the acquisition tool name field from the case database.
+	 *
+	 * @return The acquisition tool name. May be Null if not set.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	public String getAcquisitionToolName() throws TskCoreException {
+		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_name");
+	}
+
+	/**
+	 * Gets the acquisition tool version field from the case database.
+	 *
+	 * @return The acquisition tool version. May be Null if not set.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	public String getAcquisitionToolVersion() throws TskCoreException {
+		return getSleuthkitCase().getDataSourceInfoString(this, "acquisition_tool_version");
+	}
+	
+	/**
+	 * Gets the host for this data source.
+	 * 
+	 * @return The host
+	 * 
+	 * @throws TskCoreException 
+	 */
+	@Override
+	public Host getHost() throws TskCoreException {
+		// This is a check-then-act race condition that may occasionally result
+		// in additional processing but is safer than using locks.
+		if (host == null) {
+			host = getSleuthkitCase().getHostManager().getHostByDataSource(this);
+		}
+		return host;
+	}	
+
+	/**
+	 * Gets the added date field from the case database.
+	 *
+	 * @return The date time when the image was added in epoch seconds.
+	 *
+	 * @throws TskCoreException Thrown if the data can not be read
+	 */
+	public Long getDateAdded() throws TskCoreException {
+		return getSleuthkitCase().getDataSourceInfoLong(this, "added_date_time");
+	}
+
 	/**
 	 * Close a ResultSet.
 	 *
@@ -235,6 +314,34 @@ private static void closeStatement(Statement statement) {
 		}
 	}
 	
+	/**
+	 * Accepts a content visitor (Visitor design pattern).
+	 *
+	 * @param <T>     The type returned by the visitor.
+	 * @param visitor A ContentVisitor supplying an algorithm to run using this
+	 *                virtual directory as input.
+	 *
+	 * @return The output of the algorithm.
+	 */
+	@Override
+	public <T> T accept(ContentVisitor<T> visitor) {
+		return visitor.visit(this);
+	}
+	
+	/**
+	 * Accepts a Sleuthkit item visitor (Visitor design pattern).
+	 *
+	 * @param <T>     The type returned by the visitor.
+	 * @param visitor A SleuthkitItemVisitor supplying an algorithm to run using
+	 *                this virtual directory as input.
+	 *
+	 * @return The output of the algorithm.
+	 */
+	@Override
+	public <T> T accept(SleuthkitItemVisitor<T> visitor) {
+		return visitor.visit(this);
+	}
+	
 	/**
 	 * Constructs a local/logical files and/or directories data source.
 	 *
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccount.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccount.java
new file mode 100644
index 0000000000000000000000000000000000000000..d78434f4c3e7efcc660b4b4df9ca32d33b6c770a
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccount.java
@@ -0,0 +1,567 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.ResourceBundle;
+
+/**
+ * Abstracts an OS user account. OS Accounts have a scope, which is defined by
+ * their parent OsAccountRealm.
+ *
+ * An OS user account may own files and (some) artifacts.
+ *
+ * OsAccounts can be created with minimal data and updated as more is learned.
+ * Caller must call update() to save any new data.
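+ *
+ * <p>Illustrative read-only sketch (assumes an existing OsAccount named
+ * {@code account}); most fields are Optional because they may not be known yet:
+ * <pre>{@code
+ * String name = account.getLoginName()
+ *         .orElse(account.getAddr().orElse("unknown"));
+ * Optional<Long> creationTime = account.getCreationTime();
+ * }</pre>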
+ */
+public final class OsAccount extends AbstractContent {
+
+	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
+
+	final static Long NO_ACCOUNT = null;
+	final static String NO_OWNER_ID = null;
+
+	private final SleuthkitCase sleuthkitCase;
+
+	private final long osAccountObjId;	// Object ID within the database
+	private final long realmId;		// realm where the account exists in (could be local or domain scoped)
+	private final String loginName;	// user login name - may be null
+	private final String addr;	// a unique user sid/uid, may be null
+
+	private final String signature;		// This exists only to prevent duplicates.
+	// Together realm_id & signature must be unique for each account.
+	// It is either addr if addr is defined,
+	// or the login_name if login_name is defined.
+
+	private final String fullName;	// full name, may be null
+	private final OsAccountType osAccountType;
+	private final OsAccountStatus osAccountStatus;
+	private final OsAccountDbStatus osAccountDbStatus;  // Status of row in the database
+	private final Long creationTime;
+
+	private List<OsAccountAttribute> osAccountAttributes = null;
+
+	/**
+	 * Encapsulates status of an account - whether is it active or disabled or
+	 * deleted.
+	 */
+	public enum OsAccountStatus {
+		UNKNOWN(0, bundle.getString("OsAccountStatus.Unknown.text")),
+		ACTIVE(1, bundle.getString("OsAccountStatus.Active.text")),
+		DISABLED(2, bundle.getString("OsAccountStatus.Disabled.text")),
+		DELETED(3, bundle.getString("OsAccountStatus.Deleted.text"));
+
+		private final int id;
+		private final String name;
+
+		OsAccountStatus(int id, String name) {
+			this.id = id;
+			this.name = name;
+		}
+
+		/**
+		 * Get account status id.
+		 *
+		 * @return Account status id.
+		 */
+		public int getId() {
+			return id;
+		}
+
+		/**
+		 * Get the account status enum name.
+		 *
+		 * @return The account status name.
+		 */
+		public String getName() {
+			return name;
+		}
+
+		/**
+		 * Gets account status enum from id.
+		 *
+		 * @param statusId Id to look for.
+		 *
+		 * @return Account status enum.
+		 */
+		public static OsAccountStatus fromID(int statusId) {
+			for (OsAccountStatus statusType : OsAccountStatus.values()) {
+				if (statusType.ordinal() == statusId) {
+					return statusType;
+				}
+			}
+			return null;
+		}
+	}
+
+	/**
+	 * Encapsulates status of OsAccount row. OsAccounts that are not "Active"
+	 * are generally invisible - they will not be returned by any queries on the
+	 * string fields.
+	 */
+	enum OsAccountDbStatus {
+		ACTIVE(0, "Active"),
+		MERGED(1, "Merged"),
+		DELETED(2, "Deleted");
+
+		private final int id;
+		private final String name;
+
+		OsAccountDbStatus(int id, String name) {
+			this.id = id;
+			this.name = name;
+		}
+
+		int getId() {
+			return id;
+		}
+
+		String getName() {
+			return name;
+		}
+
+		static OsAccountDbStatus fromID(int typeId) {
+			for (OsAccountDbStatus type : OsAccountDbStatus.values()) {
+				if (type.ordinal() == typeId) {
+					return type;
+				}
+			}
+			return null;
+		}
+	}
+
+	/**
+	 * Encapsulates an account type - whether it's an interactive login account
+	 * or a service account.
+	 */
+	public enum OsAccountType {
+		UNKNOWN(0, bundle.getString("OsAccountType.Unknown.text")),
+		SERVICE(1, bundle.getString("OsAccountType.Service.text")),
+		INTERACTIVE(2, bundle.getString("OsAccountType.Interactive.text"));
+
+		private final int id;
+		private final String name;
+
+		OsAccountType(int id, String name) {
+			this.id = id;
+			this.name = name;
+		}
+
+		/**
+		 * Get account type id.
+		 *
+		 * @return Account type id.
+		 */
+		public int getId() {
+			return id;
+		}
+
+		/**
+		 * Get account type name.
+		 *
+		 * @return Account type name.
+		 */
+		public String getName() {
+			return name;
+		}
+
+		/**
+		 * Gets account type enum from id.
+		 *
+		 * @param typeId Id to look for.
+		 *
+		 * @return Account type enum.
+		 */
+		public static OsAccountType fromID(int typeId) {
+			for (OsAccountType accountType : OsAccountType.values()) {
+				if (accountType.ordinal() == typeId) {
+					return accountType;
+				}
+			}
+			return null;
+		}
+	}
+
+	/**
+	 * Constructs an OsAccount with a realm/username and unique id, and
+	 * signature.
+	 *
+	 * @param sleuthkitCase  The SleuthKit case (case database) that contains
+	 *                       the artifact data.
+	 * @param osAccountobjId Obj id of the account in tsk_objects table.
+	 * @param realmId        Realm - defines the scope of this account.
+	 * @param loginName      Login name for the account. May be null.
+	 * @param uniqueId       An id unique within the realm - a SID or uid. May
+	 *                       be null only if login name is not null.
+	 * @param signature	     A unique signature constructed from realm id and
+	 *                       loginName or uniqueId.
+	 * @param fullName       Full name.
+	 * @param creationTime   Account creation time.
+	 * @param accountType    Account type.
+	 * @param accountStatus  Account status.
+	 * @param dbStatus       Status of row in database.
+	 */
+	OsAccount(SleuthkitCase sleuthkitCase, long osAccountobjId, long realmId, String loginName, String uniqueId, String signature,
+			String fullName, Long creationTime, OsAccountType accountType, OsAccountStatus accountStatus, OsAccountDbStatus accountDbStatus) {
+
+		super(sleuthkitCase, osAccountobjId, signature);
+
+		this.sleuthkitCase = sleuthkitCase;
+		this.osAccountObjId = osAccountobjId;
+		this.realmId = realmId;
+		this.loginName = loginName;
+		this.addr = uniqueId;
+		this.signature = signature;
+		this.fullName = fullName;
+		this.creationTime = creationTime;
+		this.osAccountType = accountType;
+		this.osAccountStatus = accountStatus;
+		this.osAccountDbStatus = accountDbStatus;
+	}
+
+	/**
+	 * This function is used by OsAccountManager to update the list of OsAccount
+	 * attributes.
+	 *
+	 * @param osAccountAttributes The osAccount attributes that are to be added.
+	 */
+	synchronized void setAttributesInternal(List<OsAccountAttribute> osAccountAttributes) {
+		this.osAccountAttributes = osAccountAttributes;
+	}
+
+	/**
+	 * Get the account Object Id that is unique within the scope of the case.
+	 *
+	 * @return Account id.
+	 */
+	public long getId() {
+		return osAccountObjId;
+	}
+
+	/**
+	 * Get the unique identifier for the account, such as UID or SID. The id is
+	 * unique within the account realm.
+	 *
+	 * @return Optional unique identifier.
+	 */
+	public Optional<String> getAddr() {
+		return Optional.ofNullable(addr);
+	}
+
+	/**
+	 * Get the ID for the account realm. Get the Realm via
+	 * OsAccountRealmManager.getRealmByRealmId(). NOTE: The realm may get
+	 * updated as more data is parsed, so listen for events to update as
+	 * needed.
+	 *
+	 * @return The realm id.
+	 */
+	public long getRealmId() {
+		return realmId;
+	}
+
+	/**
+	 * Get account login name, such as "jdoe"
+	 *
+	 * @return Optional login name.
+	 */
+	public Optional<String> getLoginName() {
+		return Optional.ofNullable(loginName);
+	}
+
+	/**
+	 * Get the account signature.
+	 *
+	 * @return Account signature.
+	 */
+	String getSignature() {
+		return signature;
+	}
+
+	/**
+	 * Get account user full name, such as "John Doe"
+	 *
+	 * @return Optional with full name.
+	 */
+	public Optional<String> getFullName() {
+		return Optional.ofNullable(fullName);
+	}
+
+	/**
+	 * Get account creation time.
+	 *
+	 * @return Optional with account creation time.
+	 */
+	public Optional<Long> getCreationTime() {
+		return Optional.ofNullable(creationTime);
+	}
+
+	/**
+	 * Get account type.
+	 *
+	 * @return Optional with account type.
+	 */
+	public Optional<OsAccountType> getOsAccountType() {
+		return Optional.ofNullable(osAccountType);
+	}
+
+	/**
+	 * Get account status.
+	 *
+	 * @return Optional with account status.
+	 */
+	public Optional<OsAccountStatus> getOsAccountStatus() {
+		return Optional.ofNullable(osAccountStatus);
+	}
+
+	/**
+	 * Get account status in the database.
+	 *
+	 * @return Database account status.
+	 */
+	public OsAccountDbStatus getOsAccountDbStatus() {
+		return osAccountDbStatus;
+	}
+
+	/**
+	 * Get additional account attributes.
+	 *
+	 * @return List of additional account attributes. May return an empty list.
+	 *
+	 * @throws TskCoreException
+	 */
+	public synchronized List<OsAccountAttribute> getExtendedOsAccountAttributes() throws TskCoreException {
+		if (osAccountAttributes == null) {
+			osAccountAttributes = sleuthkitCase.getOsAccountManager().getOsAccountAttributes(this);
+		}
+		return Collections.unmodifiableList(osAccountAttributes);
+	}
+
+	/**
+	 * Return the os account instances.
+	 *
+	 * @return List of all the OsAccountInstances. May return an empty list.
+	 *
+	 * @throws TskCoreException
+	 */
+	public synchronized List<OsAccountInstance> getOsAccountInstances() throws TskCoreException {
+		return sleuthkitCase.getOsAccountManager().getOsAccountInstances(this);
+	}
+
+	/**
+	 * Gets the SleuthKit case database for this account.
+	 *
+	 * @return The SleuthKit case object.
+	 */
+	@Override
+	public SleuthkitCase getSleuthkitCase() {
+		return sleuthkitCase;
+	}
+
+	@Override
+	public int read(byte[] buf, long offset, long len) throws TskCoreException {
+		// No data to read. 
+		return 0;
+	}
+
+	@Override
+	public void close() {
+		// nothing to close
+	}
+
+	@Override
+	public long getSize() {
+		// No data. 
+		return 0;
+	}
+
+	@Override
+	public <T> T accept(ContentVisitor<T> v) {
+
+		throw new UnsupportedOperationException("Not supported yet.");
+	}
+
+	@Override
+	public <T> T accept(SleuthkitItemVisitor<T> v) {
+		return v.visit(this);
+	}
+
+	/**
+	 * Abstracts attributes of an OS account. An attribute may be specific to a
+	 * host, or applicable across all hosts.
+	 *
+	 * As an example, last login time is host specific, whereas last password
+	 * reset date is independent of a host.
+	 *
+	 */
+	public final class OsAccountAttribute extends AbstractAttribute {
+
+		private final long osAccountObjId;	// OS account to which this attribute belongs.
+		private final Long hostId; // Host to which this attribute applies, may be null
+		private final Long sourceObjId; // Object id of the source where the attribute was discovered.
+
+		/**
+		 * Creates an os account attribute with int value.
+		 *
+		 * @param attributeType Attribute type.
+		 * @param valueInt      Int value.
+		 * @param osAccount     Account which the attribute pertains to.
+		 * @param host          Host to which the attribute applies. Pass
+		 *                      Null if the attribute applies to all the hosts
+		 *                      in the realm.
+		 * @param sourceObj     Source where the attribute was found, may be
+		 *                      null.
+		 */
+		public OsAccountAttribute(BlackboardAttribute.Type attributeType, int valueInt, OsAccount osAccount, Host host, Content sourceObj) {
+			super(attributeType, valueInt);
+
+			this.osAccountObjId = osAccount.getId();
+			this.hostId = (host != null ? host.getHostId() : null);
+			this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null);
+		}
+
+		/**
+		 * Creates an os account attribute with long value.
+		 *
+		 * @param attributeType Attribute type.
+		 * @param valueLong     Long value.
+		 * @param osAccount     Account which the attribute pertains to.
+		 * @param host          Host to which the attribute applies. Pass
+		 *                      Null if it applies across hosts.
+		 * @param sourceObj     Source where the attribute was found.
+		 */
+		public OsAccountAttribute(BlackboardAttribute.Type attributeType, long valueLong, OsAccount osAccount, Host host, Content sourceObj) {
+			super(attributeType, valueLong);
+
+			this.osAccountObjId = osAccount.getId();
+			this.hostId = (host != null ? host.getHostId() : null);
+			this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null);
+		}
+
+		/**
+		 * Creates an os account attribute with double value.
+		 *
+		 * @param attributeType Attribute type.
+		 * @param valueDouble   Double value.
+		 * @param osAccount     Account which the attribute pertains to.
+		 * @param host          Host to which the attribute applies. Pass
+		 *                      Null if it applies across hosts.
+		 * @param sourceObj     Source where the attribute was found.
+		 */
+		public OsAccountAttribute(BlackboardAttribute.Type attributeType, double valueDouble, OsAccount osAccount, Host host, Content sourceObj) {
+			super(attributeType, valueDouble);
+
+			this.osAccountObjId = osAccount.getId();
+			this.hostId = (host != null ? host.getHostId() : null);
+			this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null);
+		}
+
+		/**
+		 * Creates an os account attribute with string value.
+		 *
+		 * @param attributeType Attribute type.
+		 * @param valueString   String value.
+		 * @param osAccount     Account which the attribute pertains to.
+		 * @param host          Host to which the attribute applies. Pass
+		 *                      Null if it applies across hosts.
+		 * @param sourceObj     Source where the attribute was found.
+		 */
+		public OsAccountAttribute(BlackboardAttribute.Type attributeType, String valueString, OsAccount osAccount, Host host, Content sourceObj) {
+			super(attributeType, valueString);
+
+			this.osAccountObjId = osAccount.getId();
+			this.hostId = (host != null ? host.getHostId() : null);
+			this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null);
+		}
+
+		/**
+		 * Creates an os account attribute with byte-array value.
+		 *
+		 * @param attributeType Attribute type.
+		 * @param valueBytes    Bytes value.
+		 * @param osAccount     Account which the attribute pertains to.
+		 * @param host          Host to which the attribute applies. Pass
+		 *                      Null if it applies across hosts.
+		 * @param sourceObj     Source where the attribute was found.
+		 */
+		public OsAccountAttribute(BlackboardAttribute.Type attributeType, byte[] valueBytes, OsAccount osAccount, Host host, Content sourceObj) {
+			super(attributeType, valueBytes);
+
+			this.osAccountObjId = osAccount.getId();
+			this.hostId = (host != null ? host.getHostId() : null);
+			this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null);
+		}
+
+		/**
+		 * Constructor to be used when creating an attribute after reading the
+		 * data from the table.
+		 *
+		 * @param attributeType Attribute type.
+		 * @param valueInt      Int value.
+		 * @param valueLong     Long value.
+		 * @param valueDouble   Double value.
+		 * @param valueString   String value.
+		 * @param valueBytes    Bytes value.
+		 * @param sleuthkitCase Sleuthkit case.
+		 * @param osAccount     Account which the attribute pertains to.
+		 * @param host          Host to which the attribute applies. Pass
+		 *                      Null if it applies across hosts.
+		 * @param sourceObj     Source where the attribute was found.
+		 */
+		OsAccountAttribute(BlackboardAttribute.Type attributeType, int valueInt, long valueLong, double valueDouble, String valueString, byte[] valueBytes,
+				SleuthkitCase sleuthkitCase, OsAccount osAccount, Host host, Content sourceObj) {
+
+			super(attributeType,
+					valueInt, valueLong, valueDouble, valueString, valueBytes,
+					sleuthkitCase);
+			this.osAccountObjId = osAccount.getId();
+			this.hostId = (host != null ? host.getHostId() : null);
+			this.sourceObjId = (sourceObj != null ? sourceObj.getId() : null);
+		}
+
+		/**
+		 * Get the host id for the account attribute.
+		 *
+		 * @return Optional with Host id.
+		 */
+		public Optional<Long> getHostId() {
+			return Optional.ofNullable(hostId);
+		}
+
+		/**
+		 * Get the object id of account to which this attribute applies.
+		 *
+		 * @return Account row id.
+		 */
+		public long getOsAccountObjectId() {
+			return osAccountObjId;
+		}
+
+		/**
+		 * Get the object id of the source where the attribute was found.
+		 *
+		 * @return Object id of source.
+		 */
+		public Optional<Long> getSourceObjectId() {
+			return Optional.ofNullable(sourceObjId);
+		}
+	}
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccountInstance.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccountInstance.java
new file mode 100755
index 0000000000000000000000000000000000000000..0a39baaf982e51fb8a541f54b77cd1ec6a52de3e
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccountInstance.java
@@ -0,0 +1,249 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Objects;
+import java.util.ResourceBundle;
+
+/**
+ * An OsAccountInstance represents the appearance of a particular OsAccount on a
+ * particular data source.
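+ *
+ * <p>Illustrative sketch (assumes an existing OsAccountInstance named
+ * {@code instance}); the related objects are loaded lazily from the case
+ * database:
+ * <pre>{@code
+ * OsAccount account = instance.getOsAccount();
+ * DataSource dataSource = instance.getDataSource();
+ * OsAccountInstance.OsAccountInstanceType type = instance.getInstanceType();
+ * }</pre>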
+ */
+public class OsAccountInstance implements Comparable<OsAccountInstance> {
+
+	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
+
+	private final SleuthkitCase skCase;
+	private final long instanceId;
+	private final long accountId;
+	private final long dataSourceId;
+	private final OsAccountInstanceType instanceType;
+
+	private OsAccount account;
+	private DataSource dataSource;
+
+	/**
+	 * Constructs a representation of an OS account instance.
+	 *
+	 * @param skCase          The case database.
+	 * @param instanceId      The instance ID.
+	 * @param account         The OS account of which this object is an
+	 *                        instance.
+	 * @param dataSourceId    The object ID of the data source where the
+	 *                        instance was found.
+	 * @param instanceType    The instance type.
+	 */
+	OsAccountInstance(SleuthkitCase skCase, long instanceId, OsAccount account, long dataSourceId, OsAccountInstanceType instanceType) {
+		this(skCase, instanceId, account.getId(), dataSourceId, instanceType);
+		this.account = account;
+	}
+
+	/**
+	 * Constructs a representation of an OS account instance.
+	 *
+	 * @param skCase          The case database.
+	 * @param instanceId      The instance ID.
+	 * @param accountObjId    The object ID of the OS account of which this
+	 *                        object is an instance.
+	 * @param dataSourceObjId The object ID of the data source where the
+	 *                        instance was found.
+	 * @param instanceType    The instance type.
+	 */
+	OsAccountInstance(SleuthkitCase skCase, long instanceId, long accountObjId, long dataSourceObjId, OsAccountInstanceType instanceType) {
+		this.skCase = skCase;
+		this.instanceId = instanceId;
+		this.accountId = accountObjId;
+		this.dataSourceId = dataSourceObjId;
+		this.instanceType = instanceType;
+	}
+
+	/**
+	 * Gets the instance ID of this OS account instance.
+	 *
+	 * @return The instance ID.
+	 */
+	public long getInstanceId() {
+		return instanceId;
+	}
+
+	/**
+	 * Returns the OsAccount object for this instance.
+	 *
+	 * @return The OsAccount object.
+	 *
+	 * @throws TskCoreException Exception thrown if there is an error querying
+	 *                          the case database.
+	 */
+	public OsAccount getOsAccount() throws TskCoreException {
+		if (account == null) {
+			try {
+				account = skCase.getOsAccountManager().getOsAccountByObjectId(accountId);
+			} catch (TskCoreException ex) {
+				throw new TskCoreException(String.format("Failed to get OsAccount for id %d", accountId), ex);
+			}
+		}
+
+		return account;
+	}
+
+	/**
+	 * Returns the data source for this account instance.
+	 *
+	 * @return Return the data source instance.
+	 *
+	 * @throws TskCoreException
+	 */
+	public DataSource getDataSource() throws TskCoreException {
+		if (dataSource == null) {
+			try {
+				dataSource = skCase.getDataSource(dataSourceId);
+			} catch (TskDataException ex) {
+				throw new TskCoreException(String.format("Failed to get DataSource for id %d", dataSourceId), ex);
+			}
+		}
+
+		return dataSource;
+	}
+
+	/**
+	 * Returns the type for this OsAccount instance.
+	 *
+	 * @return The instance type.
+	 */
+	public OsAccountInstanceType getInstanceType() {
+		return instanceType;
+	}
+
+	/**
+	 * Return the dataSourceId value.
+	 *
+	 * @return Id of the instance data source.
+	 */
+	private long getDataSourceId() {
+		return dataSourceId;
+	}
+
+	@Override
+	public int compareTo(OsAccountInstance other) {
+		if (equals(other)) {
+			return 0;
+		}
+
+		if (dataSourceId != other.getDataSourceId()) {
+			return Long.compare(dataSourceId, other.getDataSourceId());
+		}
+
+		return Long.compare(accountId, other.accountId);
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj) {
+			return true;
+		}
+		if (obj == null) {
+			return false;
+		}
+		if (getClass() != obj.getClass()) {
+			return false;
+		}
+		final OsAccountInstance other = (OsAccountInstance) obj;
+		if (this.accountId != other.accountId) {
+			return false;
+		}
+
+		return this.dataSourceId == other.dataSourceId;
+	}
+
+	@Override
+	public int hashCode() {
+		int hash = 7;
+		hash = 67 * hash + Objects.hashCode(this.dataSourceId);
+		hash = 67 * hash + Objects.hashCode(this.accountId);
+		hash = 67 * hash + Objects.hashCode(this.instanceType);
+		return hash;
+	}
+
+	/**
+	 * Describes the relationship between an os account instance and the host
+	 * where the instance was found.
+	 *
+	 * This indicates whether the os account actually performed any action on
+	 * the host or whether just a reference to it was found on the host (such
+	 * as in a log file).
+	 */
+	public enum OsAccountInstanceType {
+		LAUNCHED(0, bundle.getString("OsAccountInstanceType.Launched.text"), bundle.getString("OsAccountInstanceType.Launched.descr.text")), // the user launched a program on the host
+		ACCESSED(1, bundle.getString("OsAccountInstanceType.Accessed.text"), bundle.getString("OsAccountInstanceType.Accessed.descr.text")), // user accessed a resource for read/write
+		REFERENCED(2, bundle.getString("OsAccountInstanceType.Referenced.text"), bundle.getString("OsAccountInstanceType.Referenced.descr.text"));	// user was referenced, e.g. in an event log.
+
+		private final int id;
+		private final String name;
+		private final String description;
+
+		OsAccountInstanceType(int id, String name, String description) {
+			this.id = id;
+			this.name = name;
+			this.description = description;
+		}
+
+		/**
+		 * Get account instance type id.
+		 *
+		 * @return Account instance type id.
+		 */
+		public int getId() {
+			return id;
+		}
+
+		/**
+		 * Get account instance type name.
+		 *
+		 * @return Account instance type name.
+		 */
+		public String getName() {
+			return name;
+		}
+
+		/**
+		 * Get account instance type description.
+		 *
+		 * @return Account instance type description.
+		 */
+		public String getDescription() {
+			return description;
+		}
+
+		/**
+		 * Gets account instance type enum from id.
+		 *
+		 * @param typeId Id to look for.
+		 *
+		 * @return Account instance type enum.
+		 */
+		public static OsAccountInstanceType fromID(int typeId) {
+			for (OsAccountInstanceType statusType : OsAccountInstanceType.values()) {
+				if (statusType.ordinal() == typeId) {
+					return statusType;
+				}
+			}
+			return null;
+		}
+	}
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccountManager.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccountManager.java
new file mode 100755
index 0000000000000000000000000000000000000000..baf2a3e4fe4d8cf26f7dfa52795f2e9986e162eb
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccountManager.java
@@ -0,0 +1,1650 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import com.google.common.base.Strings;
+import org.apache.commons.lang3.StringUtils;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.util.Collections;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.NavigableSet;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.concurrent.ConcurrentSkipListSet;
+import java.util.stream.Collectors;
+import org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE;
+import org.sleuthkit.datamodel.OsAccount.OsAccountStatus;
+import org.sleuthkit.datamodel.OsAccount.OsAccountType;
+import org.sleuthkit.datamodel.OsAccount.OsAccountAttribute;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
+import org.sleuthkit.datamodel.TskEvent.OsAccountsUpdatedTskEvent;
+
+/**
+ * Responsible for creating/updating/retrieving the OS accounts for files and
+ * artifacts.
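+ *
+ * <p>A minimal, illustrative sketch of obtaining the manager (assumes an open
+ * SleuthkitCase named {@code skCase}):
+ * <pre>{@code
+ * OsAccountManager osAccountManager = skCase.getOsAccountManager();
+ * }</pre>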
+ */
+public final class OsAccountManager {
+
+	private final SleuthkitCase db;
+	private final Object osAcctInstancesCacheLock;
+	private final NavigableSet<OsAccountInstance> osAccountInstanceCache;
+
+	/**
+	 * Construct an OsAccountManager for the given SleuthkitCase.
+	 *
+	 * @param skCase The SleuthkitCase
+	 *
+	 */
+	OsAccountManager(SleuthkitCase skCase) {
+		db = skCase;
+		osAcctInstancesCacheLock = new Object();
+		osAccountInstanceCache = new ConcurrentSkipListSet<>();
+	}
+
+	/**
+	 * Creates an OS account with the given unique id in the given realm. If
+	 * an account already exists with the given id, then the existing OS
+	 * account is returned.
+	 *
+	 * @param uniqueAccountId Account sid/uid.
+	 * @param realm           Account realm.
+	 *
+	 * @return OsAccount.
+	 *
+	 * @throws TskCoreException If there is an error in creating the OSAccount.
+	 *
+	 */
+	OsAccount newOsAccount(String uniqueAccountId, OsAccountRealm realm) throws TskCoreException {
+
+		// ensure unique id is provided
+		if (Strings.isNullOrEmpty(uniqueAccountId)) {
+			throw new TskCoreException("Cannot create OS account with null uniqueId.");
+		}
+
+		if (realm == null) {
+			throw new TskCoreException("Cannot create OS account without a realm.");
+		}
+
+		CaseDbTransaction trans = db.beginTransaction();
+		try {
+
+			// try to create account
+			try {
+				OsAccount account = newOsAccount(uniqueAccountId, null, realm, OsAccount.OsAccountStatus.UNKNOWN, trans);
+				trans.commit();
+				trans = null;
+				return account;
+			} catch (SQLException ex) {
+				// Close the transaction before moving on
+				trans.rollback();
+				trans = null;
+
+				// Create may fail if an OsAccount already exists. 
+				Optional<OsAccount> osAccount = this.getOsAccountByAddr(uniqueAccountId, realm);
+				if (osAccount.isPresent()) {
+					return osAccount.get();
+				}
+
+				// create failed for some other reason, throw an exception
+				throw new TskCoreException(String.format("Error creating OsAccount with uniqueAccountId = %s in realm id = %d", uniqueAccountId, realm.getRealmId()), ex);
+			}
+		} finally {
+			if (trans != null) {
+				trans.rollback();
+			}
+		}
+	}
+
+	/**
+	 * Creates an OS account with Windows-specific data. If an account already
+	 * exists with the given id or realm/login, then the existing OS account is
+	 * returned.
+	 *
+	 * If the account realm already exists, but is missing the address or the
+	 * realm name, the realm is updated.
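+	 *
+	 * <p>Illustrative call only; the SID and realm name values here are
+	 * hypothetical and {@code host} is assumed to be an existing Host:
+	 * <pre>{@code
+	 * OsAccount account = skCase.getOsAccountManager().newWindowsOsAccount(
+	 *         "S-1-5-21-1004336348-1177238915-682003330-1001",
+	 *         null, "WORKGROUP", host, OsAccountRealm.RealmScope.UNKNOWN);
+	 * }</pre>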
+	 *
+	 * @param sid           Account sid/uid, can be null if loginName is
+	 *                      supplied.
+	 * @param loginName     Login name, can be null if sid is supplied.
+	 * @param realmName     Realm within which the accountId or login name is
+	 *                      unique. Can be null if sid is supplied.
+	 * @param referringHost Host referring the account.
+	 * @param realmScope    Realm scope.
+	 *
+	 * @return OsAccount.
+	 *
+	 * @throws TskCoreException                     If there is an error in
+	 *                                              creating the OSAccount.
+	 * @throws OsAccountManager.NotUserSIDException If the given SID is not a
+	 *                                              user SID.
+	 *
+	 */
+	public OsAccount newWindowsOsAccount(String sid, String loginName, String realmName, Host referringHost, OsAccountRealm.RealmScope realmScope) throws TskCoreException, NotUserSIDException {
+
+		if (realmScope == null) {
+			throw new TskCoreException("RealmScope cannot be null. Use UNKNOWN if scope is not known.");
+		}
+		if (referringHost == null) {
+			throw new TskCoreException("A referring host is required to create an account.");
+		}
+
+		// ensure at least one of the two is supplied - unique id or a login name
+		if (StringUtils.isBlank(sid) && StringUtils.isBlank(loginName)) {
+			throw new TskCoreException("Cannot create OS account with both uniqueId and loginName as null.");
+		}
+		// Realm name is required if the sid is null. 
+		if (StringUtils.isBlank(sid) && StringUtils.isBlank(realmName)) {
+			throw new TskCoreException("Realm name or SID is required to create a Windows account.");
+		}
+
+		if (!StringUtils.isBlank(sid) && !WindowsAccountUtils.isWindowsUserSid(sid)) {
+			throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", sid));
+		}
+
+		// get the realm for the account, and update it if it is missing addr or name.
+		Optional<OsAccountRealm> realmOptional;
+		try (CaseDbConnection connection = db.getConnection()) {
+			realmOptional = db.getOsAccountRealmManager().getAndUpdateWindowsRealm(sid, realmName, referringHost, connection);
+		}
+		OsAccountRealm realm;
+		if (realmOptional.isPresent()) {
+			realm = realmOptional.get();
+		} else {
+			// realm was not found, create it.
+			realm = db.getOsAccountRealmManager().newWindowsRealm(sid, realmName, referringHost, realmScope);
+		}
+
+		return newWindowsOsAccount(sid, loginName, realm);
+	}
+
+	/**
+	 * Creates an OS account with Windows-specific data. If an account already
+	 * exists with the given id or realm/login, then the existing OS account is
+	 * returned.
+	 *
+	 * @param sid       Account sid/uid, can be null if loginName is supplied.
+	 * @param loginName Login name, can be null if sid is supplied.
+	 * @param realm     The associated realm.
+	 *
+	 * @return OsAccount.
+	 *
+	 * @throws TskCoreException                     If there is an error in
+	 *                                              creating the OSAccount.
+	 * @throws OsAccountManager.NotUserSIDException If the given SID is not a
+	 *                                              user SID.
+	 *
+	 */
+	public OsAccount newWindowsOsAccount(String sid, String loginName, OsAccountRealm realm) throws TskCoreException, NotUserSIDException {
+
+		// ensure at least one of the two is supplied - unique id or a login name
+		if (StringUtils.isBlank(sid) && StringUtils.isBlank(loginName)) {
+			throw new TskCoreException("Cannot create OS account with both uniqueId and loginName as null.");
+		}
+
+		if (!StringUtils.isBlank(sid) && !WindowsAccountUtils.isWindowsUserSid(sid)) {
+			throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", sid));
+		}
+
+		CaseDbTransaction trans = db.beginTransaction();
+		try {
+			// try to create account
+			try {
+				OsAccount account = newOsAccount(sid, loginName, realm, OsAccount.OsAccountStatus.UNKNOWN, trans);
+				trans.commit();
+				trans = null;
+				return account;
+			} catch (SQLException ex) {
+				// Rollback the transaction before proceeding
+				trans.rollback();
+				trans = null;
+
+				// Create may fail if an OsAccount already exists. 
+				Optional<OsAccount> osAccount;
+
+				// First search for account by uniqueId
+				if (!Strings.isNullOrEmpty(sid)) {
+					osAccount = getOsAccountByAddr(sid, realm);
+					if (osAccount.isPresent()) {
+						return osAccount.get();
+					}
+				}
+
+				// search by loginName
+				if (!Strings.isNullOrEmpty(loginName)) {
+					osAccount = getOsAccountByLoginName(loginName, realm);
+					if (osAccount.isPresent()) {
+						return osAccount.get();
+					}
+				}
+
+				// create failed for some other reason, throw an exception
+				throw new TskCoreException(String.format("Error creating OsAccount with sid = %s, loginName = %s, realm = %s, referring host = %s",
+						(sid != null) ? sid : "Null",
+						(loginName != null) ? loginName : "Null",
+						(!realm.getRealmNames().isEmpty()) ? realm.getRealmNames().get(0) : "Null",
+						realm.getScopeHost().isPresent() ? realm.getScopeHost().get().getName() : "Null"), ex);
+
+			}
+		} finally {
+			if (trans != null) {
+				trans.rollback();
+			}
+		}
+	}
+
+	/**
+	 * Creates an OS account with the given uid, name, and realm.
+	 *
+	 * @param uniqueId      Account sid/uid. May be null.
+	 * @param loginName     Login name. May be null only if SID is not null.
+	 * @param realm	        Realm.
+	 * @param accountStatus Account status.
+	 * @param trans         Open transaction to use.
+	 *
+	 * @return OS account.
+	 *
+	 * @throws TskCoreException If there is an error creating the account.
+	 */
+	private OsAccount newOsAccount(String uniqueId, String loginName, OsAccountRealm realm, OsAccount.OsAccountStatus accountStatus, CaseDbTransaction trans) throws TskCoreException, SQLException {
+
+		if (Objects.isNull(realm)) {
+			throw new TskCoreException("Cannot create an OS Account, realm is NULL.");
+		}
+
+		String signature = getOsAccountSignature(uniqueId, loginName);
+		OsAccount account;
+
+		CaseDbConnection connection = trans.getConnection();
+
+		// first create a tsk_object for the OsAccount.
+		// RAMAN TODO: need to get the correct parent obj id.  
+		//            Create an Object Directory parent and use its id.
+		long parentObjId = 0;
+
+		int objTypeId = TskData.ObjectType.OS_ACCOUNT.getObjectType();
+		long osAccountObjId = db.addObject(parentObjId, objTypeId, connection);
+
+		String accountInsertSQL = "INSERT INTO tsk_os_accounts(os_account_obj_id, login_name, realm_id, addr, signature, status)"
+				+ " VALUES (?, ?, ?, ?, ?, ?)"; // NON-NLS
+
+		PreparedStatement preparedStatement = connection.getPreparedStatement(accountInsertSQL, Statement.NO_GENERATED_KEYS);
+		preparedStatement.clearParameters();
+
+		preparedStatement.setLong(1, osAccountObjId);
+
+		preparedStatement.setString(2, loginName);
+		preparedStatement.setLong(3, realm.getRealmId());
+
+		preparedStatement.setString(4, uniqueId);
+		preparedStatement.setString(5, signature);
+		preparedStatement.setInt(6, accountStatus.getId());
+
+		connection.executeUpdate(preparedStatement);
+
+		account = new OsAccount(db, osAccountObjId, realm.getRealmId(), loginName, uniqueId, signature,
+				null, null, null, accountStatus, OsAccount.OsAccountDbStatus.ACTIVE);
+
+		trans.registerAddedOsAccount(account);
+		return account;
+	}
+
+	/**
+	 * Get the OS account with the given unique id.
+	 *
+	 * @param addr Account sid/uid.
+	 * @param host Host for account realm, may be null.
+	 *
+	 * @return Optional with OsAccount, Optional.empty if no matching account is
+	 *         found.
+	 *
+	 * @throws TskCoreException If there is an error getting the account.
+	 */
+	private Optional<OsAccount> getOsAccountByAddr(String addr, Host host) throws TskCoreException {
+
+		try (CaseDbConnection connection = db.getConnection()) {
+			return getOsAccountByAddr(addr, host, connection);
+		}
+	}
+
+	/**
+	 * Gets the OS account for the given unique id.
+	 *
+	 * @param uniqueId   Account SID/uid.
+	 * @param host       Host to match the realm, may be null.
+	 * @param connection Database connection to use.
+	 *
+	 * @return Optional with OsAccount, Optional.empty if no account with
+	 *         matching uniqueId is found.
+	 *
+	 * @throws TskCoreException
+	 */
+	private Optional<OsAccount> getOsAccountByAddr(String uniqueId, Host host, CaseDbConnection connection) throws TskCoreException {
+
+		String whereHostClause = (host == null)
+				? " 1 = 1 "
+				: " ( realms.scope_host_id = " + host.getHostId() + " OR realms.scope_host_id IS NULL) ";
+
+		String queryString = "SELECT accounts.os_account_obj_id as os_account_obj_id, accounts.login_name, accounts.full_name, "
+				+ " accounts.realm_id, accounts.addr, accounts.signature, "
+				+ "	accounts.type, accounts.status, accounts.admin, accounts.created_date, accounts.db_status, "
+				+ " realms.realm_name as realm_name, realms.realm_addr as realm_addr, realms.realm_signature, realms.scope_host_id, realms.scope_confidence, realms.db_status as realm_db_status "
+				+ " FROM tsk_os_accounts as accounts"
+				+ "		LEFT JOIN tsk_os_account_realms as realms"
+				+ " ON accounts.realm_id = realms.id"
+				+ " WHERE " + whereHostClause
+				+ "     AND accounts.db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId()
+				+ "		AND LOWER(accounts.addr) = LOWER('" + uniqueId + "')";
+
+		db.acquireSingleUserCaseReadLock();
+		try (Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (!rs.next()) {
+				return Optional.empty();	// no match found
+			} else {
+				return Optional.of(osAccountFromResultSet(rs));
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting OS account for unique id = %s and host = %s", uniqueId, (host != null ? host.getName() : "null")), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets an active OS Account by the realm and unique id.
+	 *
+	 * @param uniqueId Account unique id.
+	 * @param realm    Account realm.
+	 *
+	 * @return Optional with OsAccount, Optional.empty, if no user is found with
+	 *         matching realm and unique id.
+	 *
+	 * @throws TskCoreException
+	 */
+	Optional<OsAccount> getOsAccountByAddr(String uniqueId, OsAccountRealm realm) throws TskCoreException {
+
+		String queryString = "SELECT * FROM tsk_os_accounts"
+				+ " WHERE LOWER(addr) = LOWER('" + uniqueId + "')"
+				+ " AND db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId()
+				+ " AND realm_id = " + realm.getRealmId();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (!rs.next()) {
+				return Optional.empty();	// no match found
+			} else {
+				return Optional.of(osAccountFromResultSet(rs));
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting OS account for realm = %s and uniqueId = %s.", (realm != null) ? realm.getSignature() : "NULL", uniqueId), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets an OS Account by the realm and login name.
+	 *
+	 * @param loginName Login name.
+	 * @param realm     Account realm.
+	 *
+	 * @return Optional with OsAccount, Optional.empty, if no user is found with
+	 *         matching realm and login name.
+	 *
+	 * @throws TskCoreException
+	 */
+	Optional<OsAccount> getOsAccountByLoginName(String loginName, OsAccountRealm realm) throws TskCoreException {
+
+		String queryString = "SELECT * FROM tsk_os_accounts"
+				+ " WHERE LOWER(login_name) = LOWER('" + loginName + "')"
+				+ " AND db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId()
+				+ " AND realm_id = " + realm.getRealmId();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (!rs.next()) {
+				return Optional.empty();	// no match found
+			} else {
+				return Optional.of(osAccountFromResultSet(rs));
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting OS account for realm = %s and loginName = %s.", (realm != null) ? realm.getSignature() : "NULL", loginName), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get the OS Account with the given object id.
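+	 *
+	 * <p>Illustrative lookup; the object id value and the
+	 * {@code osAccountManager} reference are hypothetical:
+	 * <pre>{@code
+	 * OsAccount account = osAccountManager.getOsAccountByObjectId(1234L);
+	 * }</pre>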
+	 *
+	 * @param osAccountObjId Object id for the account.
+	 *
+	 * @return OsAccount.
+	 *
+	 * @throws TskCoreException If there is an error getting the account.
+	 */
+	public OsAccount getOsAccountByObjectId(long osAccountObjId) throws TskCoreException {
+
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			return getOsAccountByObjectId(osAccountObjId, connection);
+		}
+	}
+
+	/**
+	 * Get the OsAccount with the given object id.
+	 *
+	 * @param osAccountObjId Object id for the account.
+	 * @param connection     Database connection to use.
+	 *
+	 * @return OsAccount.
+	 *
+	 * @throws TskCoreException If there is an error getting the account.
+	 */
+	OsAccount getOsAccountByObjectId(long osAccountObjId, CaseDbConnection connection) throws TskCoreException {
+
+		String queryString = "SELECT * FROM tsk_os_accounts"
+				+ " WHERE os_account_obj_id = " + osAccountObjId;
+
+		db.acquireSingleUserCaseReadLock();
+		try (Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (!rs.next()) {
+				throw new TskCoreException(String.format("No account found with obj id = %d ", osAccountObjId));
+			} else {
+				return osAccountFromResultSet(rs);
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting account with obj id = %d ", osAccountObjId), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Records that an OsAccount was used or referenced on a given data source.
+	 * This data is automatically recorded when a file or DataArtifact is
+	 * created.
+	 *
+	 * Use this method to explicitly record the association when:
+	 * - Parsing account information (such as in the registry) because the
+	 *   account may already exist in the database, but the account did not
+	 *   create any files. Therefore, no instance for it would be automatically
+	 *   created, even though you found data about it.
+	 * - You want to associate more than one OsAccount with a DataArtifact.
+	 *   Call this for each OsAccount not specified in 'newDataArtifact()'.
+	 *
+	 * This method does nothing if the instance is already recorded.
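+	 *
+	 * <p>For example (illustrative only; {@code registryAccount} and
+	 * {@code dataSource} are assumed to already exist):
+	 * <pre>{@code
+	 * osAccountManager.newOsAccountInstance(registryAccount, dataSource,
+	 *         OsAccountInstance.OsAccountInstanceType.REFERENCED);
+	 * }</pre>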
+	 *
+	 * @param osAccount    Account for which an instance needs to be added.
+	 * @param dataSource   Data source where the instance is found.
+	 * @param instanceType Instance type.
+	 *
+	 * @throws TskCoreException If there is an error creating the account
+	 *                          instance.
+	 */
+	public void newOsAccountInstance(OsAccount osAccount, DataSource dataSource, OsAccountInstance.OsAccountInstanceType instanceType) throws TskCoreException {
+		if (osAccount == null) {
+			throw new TskCoreException("Cannot create account instance with null account.");
+		}
+		if (dataSource == null) {
+			throw new TskCoreException("Cannot create account instance with null data source.");
+		}
+
+		/*
+		 * Check the cache of OS account instances for an existing instance for
+		 * this OS account and data source. Note that the account instance
+		 * created here has a bogus instance ID. This is possible since the
+		 * instance ID is not considered in the equals() and hashCode() methods
+		 * of this class.
+		 */
+		synchronized (osAcctInstancesCacheLock) {
+			if (osAccountInstanceCache.contains(new OsAccountInstance(db, 0, osAccount.getId(), dataSource.getId(), instanceType))) {
+				return;
+			}
+		}
+
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			newOsAccountInstance(osAccount.getId(), dataSource.getId(), instanceType, connection);
+		}
+	}
+
+	/**
+	 * Adds a row to the tsk_os_account_instances table. Does nothing if the
+	 * instance already exists in the table.
+	 *
+	 * @param osAccountId     Account id for which an instance needs to be
+	 *                        added.
+	 * @param dataSourceObjId Data source id where the instance is found.
+	 * @param instanceType    Instance type.
+	 * @param connection      The current database connection.
+	 *
+	 * @throws TskCoreException If there is an error creating the account
+	 *                          instance.
+	 */
+	void newOsAccountInstance(long osAccountId, long dataSourceObjId, OsAccountInstance.OsAccountInstanceType instanceType, CaseDbConnection connection) throws TskCoreException {
+		/*
+		 * Check the cache of OS account instances for an existing instance for
+		 * this OS account and data source. Note that the account instance
+		 * created here has a bogus instance ID. This is possible since the
+		 * instance ID is not considered in the equals() and hashCode() methods
+		 * of this class.
+		 */
+		synchronized (osAcctInstancesCacheLock) {
+			if (osAccountInstanceCache.contains(new OsAccountInstance(db, 0, osAccountId, dataSourceObjId, instanceType))) {
+				return;
+			}
+		}
+
+		/*
+		 * Create the OS account instance.
+		 */
+		db.acquireSingleUserCaseWriteLock();
+		try {
+			String accountInsertSQL = db.getInsertOrIgnoreSQL("INTO tsk_os_account_instances(os_account_obj_id, data_source_obj_id, instance_type)"
+					+ " VALUES (?, ?, ?)"); // NON-NLS
+			PreparedStatement preparedStatement = connection.getPreparedStatement(accountInsertSQL, Statement.RETURN_GENERATED_KEYS);
+			preparedStatement.clearParameters();
+			preparedStatement.setLong(1, osAccountId);
+			preparedStatement.setLong(2, dataSourceObjId);
+			preparedStatement.setInt(3, instanceType.getId());
+			connection.executeUpdate(preparedStatement);
+			try (ResultSet resultSet = preparedStatement.getGeneratedKeys();) {
+				if (resultSet.next()) {
+					OsAccountInstance accountInstance = new OsAccountInstance(db, resultSet.getLong(1), osAccountId, dataSourceObjId, instanceType);
+					synchronized (osAcctInstancesCacheLock) {
+						osAccountInstanceCache.add(accountInstance);
+					}
+					/*
+					 * There is a potential issue here. The cache of OS account
+					 * instances is an optimization and was not intended to be
+					 * used as an authoritative indicator of whether or not a
+					 * particular OS account instance was already added to the
+					 * case. In fact, the entire cache is flushed during merge
+					 * operations. But regardless, there is a check-then-act
+					 * race condition for multi-user cases, with or without the
+					 * cache. And although the case database schema and the SQL
+					 * returned by getInsertOrIgnoreSQL() seamlessly prevents
+					 * duplicates in the case database, a valid row ID is
+					 * returned here even if the INSERT is not done. So the
+					 * bottom line is that a redundant event may be published
+					 * from time to time.
+					 */
+					db.fireTSKEvent(new TskEvent.OsAcctInstancesAddedTskEvent(Collections.singletonList(accountInstance)));
+				}
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error adding OS account instance for OS account object id = %d, data source object id = %d", osAccountId, dataSourceObjId), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Get all accounts that had an instance on the specified host.
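+	 *
+	 * <p>Illustrative call (assumes an existing Host named {@code host}):
+	 * <pre>{@code
+	 * List<OsAccount> hostAccounts = osAccountManager.getOsAccounts(host);
+	 * }</pre>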
+	 *
+	 * @param host Host for which to look up accounts.
+	 *
+	 * @return List of OsAccounts, may be empty.
+	 *
+	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 */
+	public List<OsAccount> getOsAccounts(Host host) throws TskCoreException {
+
+		String queryString = "SELECT * FROM tsk_os_accounts as accounts "
+				+ " JOIN tsk_os_account_instances as instances "
+				+ "		ON instances.os_account_obj_id = accounts.os_account_obj_id "
+				+ " JOIN data_source_info as datasources "
+				+ "		ON datasources.obj_id = instances.data_source_obj_id "
+				+ " WHERE datasources.host_id = " + host.getHostId()
+				+ " AND accounts.db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			List<OsAccount> accounts = new ArrayList<>();
+			while (rs.next()) {
+				accounts.add(osAccountFromResultSet(rs));
+			}
+			return accounts;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting OS accounts for host id = %d", host.getHostId()), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Merge all OS accounts from sourceRealm into destRealm. After this call:
+	 * - sourceRealm's accounts will have been moved or merged
+	 * - References to sourceRealm accounts will be updated
+	 * - sourceRealm will still exist, but will be empty
+	 *
+	 * @param sourceRealm The source realm.
+	 * @param destRealm   The destination realm.
+	 * @param trans       The current transaction.
+	 *
+	 * @throws TskCoreException
+	 */
+	void mergeOsAccountsForRealms(OsAccountRealm sourceRealm, OsAccountRealm destRealm, CaseDbTransaction trans) throws TskCoreException {
+		List<OsAccount> destinationAccounts = getOsAccounts(destRealm, trans.getConnection());
+		List<OsAccount> sourceAccounts = getOsAccounts(sourceRealm, trans.getConnection());
+
+		for (OsAccount sourceAccount : sourceAccounts) {
+
+			// First a check for the case where the source account has both the login name and unique ID set and
+			// we have separate matches in the destination account for both. If we find this case, we need to first merge
+			// the two accounts in the destination realm. This will ensure that all source accounts match at most one
+			// destination account.
+			// Note that we only merge accounts based on login name if the unique ID is empty.
+			if (sourceAccount.getAddr().isPresent() && sourceAccount.getLoginName().isPresent()) {
+				List<OsAccount> duplicateDestAccounts = destinationAccounts.stream()
+						.filter(p -> p.getAddr().equals(sourceAccount.getAddr())
+						|| (p.getLoginName().equals(sourceAccount.getLoginName()) && (!p.getAddr().isPresent())))
+						.collect(Collectors.toList());
+				if (duplicateDestAccounts.size() > 1) {
+					OsAccount combinedDestAccount = duplicateDestAccounts.get(0);
+					duplicateDestAccounts.remove(combinedDestAccount);
+					for (OsAccount dupeDestAccount : duplicateDestAccounts) {
+						mergeOsAccounts(dupeDestAccount, combinedDestAccount, trans);
+					}
+				}
+			}
+
+			// Look for matching destination account
+			OsAccount matchingDestAccount = null;
+
+			// First look for matching unique id
+			if (sourceAccount.getAddr().isPresent()) {
+				List<OsAccount> matchingDestAccounts = destinationAccounts.stream()
+						.filter(p -> p.getAddr().equals(sourceAccount.getAddr()))
+						.collect(Collectors.toList());
+				if (!matchingDestAccounts.isEmpty()) {
+					matchingDestAccount = matchingDestAccounts.get(0);
+				}
+			}
+
+			// If a match wasn't found yet, look for a matching login name.
+			// We will merge only if:
+			// - We didn't already find a unique ID match
+			// - The source account has no unique ID OR the destination account has no unique ID
+			if (matchingDestAccount == null && sourceAccount.getLoginName().isPresent()) {
+				List<OsAccount> matchingDestAccounts = destinationAccounts.stream()
+						.filter(p -> (p.getLoginName().equals(sourceAccount.getLoginName())
+						&& ((!sourceAccount.getAddr().isPresent()) || (!p.getAddr().isPresent()))))
+						.collect(Collectors.toList());
+				if (!matchingDestAccounts.isEmpty()) {
+					matchingDestAccount = matchingDestAccounts.get(0);
+				}
+			}
+
+			// If we found a match, merge the accounts. Otherwise simply update the realm id
+			if (matchingDestAccount != null) {
+				mergeOsAccounts(sourceAccount, matchingDestAccount, trans);
+			} else {
+				String query = "UPDATE tsk_os_accounts SET realm_id = " + destRealm.getRealmId() + " WHERE os_account_obj_id = " + sourceAccount.getId();
+				try (Statement s = trans.getConnection().createStatement()) {
+					s.executeUpdate(query);
+				} catch (SQLException ex) {
+					throw new TskCoreException("Error executing SQL update: " + query, ex);
+				}
+				trans.registerChangedOsAccount(sourceAccount);
+			}
+		}
+	}
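+
+	/*
+	 * Illustrative example of the matching rules above (the account values are
+	 * hypothetical):
+	 *
+	 *   source account:        { addr: "S-1-5-21-X-1001", loginName: "jdoe" }
+	 *   destination account A: { addr: "S-1-5-21-X-1001" }
+	 *   destination account B: { loginName: "jdoe", no addr }
+	 *
+	 * A and B are first merged into a single destination account, and the
+	 * source account is then merged into that account via the addr match.
+	 * Login name matches are used only when the source or the destination
+	 * account has no address, so two accounts carrying different addresses are
+	 * never merged by name alone.
+	 */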
+
+	/**
+	 * Merges data between two accounts so that only one is active at the end
+	 * and all references are to it. Data from the destination account will take
+	 * priority. Basic operation:
+	 * - Update the destination if the source has names, etc. not already in the destination
+	 * - Update any references to the source (such as in tsk_files) to point to the destination
+	 * - Mark the source as "MERGED"; it will not come back in future queries
+	 *
+	 * @param sourceAccount The source account.
+	 * @param destAccount   The destination account.
+	 * @param trans         The current transaction.
+	 *
+	 * @throws TskCoreException
+	 */
+	private void mergeOsAccounts(OsAccount sourceAccount, OsAccount destAccount, CaseDbTransaction trans) throws TskCoreException {
+
+		String query = "";
+		try (Statement s = trans.getConnection().createStatement()) {
+
+			// Update all references
+			query = makeOsAccountUpdateQuery("tsk_os_account_attributes", sourceAccount, destAccount);
+			s.executeUpdate(query);
+
+			// tsk_os_account_instances has a unique constraint on os_account_obj_id, data_source_obj_id, host_id,
+			// so delete any rows that would be duplicates.
+			query = "DELETE FROM tsk_os_account_instances "
+					+ "WHERE id IN ( "
+					+ "SELECT "
+					+ "  sourceAccountInstance.id "
+					+ "FROM "
+					+ "  tsk_os_account_instances destAccountInstance "
+					+ "INNER JOIN tsk_os_account_instances sourceAccountInstance ON destAccountInstance.data_source_obj_id = sourceAccountInstance.data_source_obj_id "
+					+ "WHERE destAccountInstance.os_account_obj_id = " + destAccount.getId()
+					+ " AND sourceAccountInstance.os_account_obj_id = " + sourceAccount.getId() + " )";
+			s.executeUpdate(query);
+
+			query = makeOsAccountUpdateQuery("tsk_os_account_instances", sourceAccount, destAccount);
+			s.executeUpdate(query);
+			synchronized (osAcctInstancesCacheLock) {
+				osAccountInstanceCache.clear();
+			}
+
+			query = makeOsAccountUpdateQuery("tsk_files", sourceAccount, destAccount);
+			s.executeUpdate(query);
+
+			query = makeOsAccountUpdateQuery("tsk_data_artifacts", sourceAccount, destAccount);
+			s.executeUpdate(query);
+
+			// Update the source account. Make a dummy signature to prevent problems with the unique constraint.
+			String mergedSignature = makeMergedOsAccountSignature();
+			query = "UPDATE tsk_os_accounts SET merged_into = " + destAccount.getId()
+					+ ", db_status = " + OsAccount.OsAccountDbStatus.MERGED.getId()
+					+ ", signature = '" + mergedSignature + "' "
+					+ " WHERE os_account_obj_id = " + sourceAccount.getId();
+
+			s.executeUpdate(query);
+			trans.registerDeletedOsAccount(sourceAccount.getId());
+
+			// Merge and update the destination account. Note that this must be done after updating
+			// the source account to prevent conflicts when merging two accounts in the
+			// same realm.
+			mergeOsAccountObjectsAndUpdateDestAccount(sourceAccount, destAccount, trans);
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error executing SQL update: " + query, ex);
+		}
+	}
+
+	/**
+	 * Create a random signature for accounts that have been merged.
+	 *
+	 * @return The random signature.
+	 */
+	private String makeMergedOsAccountSignature() {
+		return "MERGED " + UUID.randomUUID().toString();
+	}
+
+	/**
+	 * Create the query to update the os_account_obj_id column in the given
+	 * table to point to the destination (merged-into) account.
+	 *
+	 * @param tableName     Name of table to update.
+	 * @param sourceAccount The source account.
+	 * @param destAccount   The destination account.
+	 *
+	 * @return The query.
+	 */
+	private String makeOsAccountUpdateQuery(String tableName, OsAccount sourceAccount, OsAccount destAccount) {
+		return "UPDATE " + tableName + " SET os_account_obj_id = " + destAccount.getId() + " WHERE os_account_obj_id = " + sourceAccount.getId();
+	}
+
+	/**
+	 * Copy all fields from sourceAccount that are not set in destAccount.
+	 *
+	 * Updates the dest account in the database.
+	 *
+	 * @param sourceAccount The source account.
+	 * @param destAccount   The destination account.
+	 * @param trans	        Transaction to use for database operations.
+	 *
+	 * @return OsAccount Updated account.
+	 */
+	private OsAccount mergeOsAccountObjectsAndUpdateDestAccount(OsAccount sourceAccount, OsAccount destAccount, CaseDbTransaction trans) throws TskCoreException {
+
+		OsAccount mergedDestAccount = destAccount;
+
+		String destLoginName = null;
+		String destAddr = null;
+
+		// Copy any fields that aren't set in the destination to the value from the source account.
+		if (!destAccount.getLoginName().isPresent() && sourceAccount.getLoginName().isPresent()) {
+			destLoginName = sourceAccount.getLoginName().get();
+		}
+
+		if (!destAccount.getAddr().isPresent() && sourceAccount.getAddr().isPresent()) {
+			destAddr = sourceAccount.getAddr().get();
+		}
+
+		// update the dest account core 
+		OsAccountUpdateResult updateStatus = this.updateOsAccountCore(destAccount, destAddr, destLoginName, trans);
+
+		if (updateStatus.getUpdateStatusCode() == OsAccountUpdateStatus.UPDATED && updateStatus.getUpdatedAccount().isPresent()) {
+			mergedDestAccount = updateStatus.getUpdatedAccount().get();
+		}
+
+		String destFullName = null;
+		Long destCreationTime = null;
+		if (!destAccount.getFullName().isPresent() && sourceAccount.getFullName().isPresent()) {
+			destFullName = sourceAccount.getFullName().get();
+		}
+
+		if (!destAccount.getCreationTime().isPresent() && sourceAccount.getCreationTime().isPresent()) {
+			destCreationTime = sourceAccount.getCreationTime().get();
+		}
+
+		// update the dest account properties 
+		updateStatus = this.updateStandardOsAccountAttributes(destAccount, destFullName, null, null, destCreationTime, trans);
+
+		if (updateStatus.getUpdateStatusCode() == OsAccountUpdateStatus.UPDATED && updateStatus.getUpdatedAccount().isPresent()) {
+			mergedDestAccount = updateStatus.getUpdatedAccount().get();
+		}
+
+		return mergedDestAccount;
+	}
+
+	/**
+	 * Get all active accounts associated with the given realm.
+	 *
+	 * @param realm      Realm for which to get the accounts.
+	 * @param connection Current database connection.
+	 *
+	 * @return List of OsAccounts, may be empty.
+	 *
+	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 */
+	private List<OsAccount> getOsAccounts(OsAccountRealm realm, CaseDbConnection connection) throws TskCoreException {
+		String queryString = "SELECT * FROM tsk_os_accounts"
+				+ " WHERE realm_id = " + realm.getRealmId()
+				+ " AND db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId()
+				+ " ORDER BY os_account_obj_id";
+
+		try (Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			List<OsAccount> accounts = new ArrayList<>();
+			while (rs.next()) {
+				accounts.add(osAccountFromResultSet(rs));
+			}
+			return accounts;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting OS accounts for realm id = %d", realm.getRealmId()), ex);
+		}
+	}
+
+	/**
+	 * Get all active accounts.
+	 *
+	 * @return List of OsAccounts, may be empty.
+	 *
+	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 */
+	public List<OsAccount> getOsAccounts() throws TskCoreException {
+		String queryString = "SELECT * FROM tsk_os_accounts"
+				+ " WHERE db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			List<OsAccount> accounts = new ArrayList<>();
+			while (rs.next()) {
+				accounts.add(osAccountFromResultSet(rs));
+			}
+			return accounts;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting OS accounts"), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets an OS account using Windows-specific data.
+	 *
+	 * @param sid           Account SID, may be null if loginName is supplied.
+	 * @param loginName     Login name, may be null if sid is supplied.
+	 * @param realmName     Realm within which the accountId or login name is
+	 *                      unique. Can be null if sid is supplied.
+	 * @param referringHost Host referring the account.
+	 *
+	 * @return Optional with OsAccount, Optional.empty if no matching OsAccount
+	 *         is found.
+	 *
+	 * @throws TskCoreException    If there is an error getting the account.
+	 * @throws NotUserSIDException If the given SID is not a user SID.
+	 */
+	public Optional<OsAccount> getWindowsOsAccount(String sid, String loginName, String realmName, Host referringHost) throws TskCoreException, NotUserSIDException {
+
+		if (referringHost == null) {
+			throw new TskCoreException("A referring host is required to get an account.");
+		}
+
+		// ensure at least one of the two is supplied - sid or a login name
+		if (StringUtils.isBlank(sid) && StringUtils.isBlank(loginName)) {
+			throw new TskCoreException("Cannot get an OS account with both SID and loginName as null.");
+		}
+
+		// first get the realm for the given SID and/or realm name
+		Optional<OsAccountRealm> realm = db.getOsAccountRealmManager().getWindowsRealm(sid, realmName, referringHost);
+		if (!realm.isPresent()) {
+			return Optional.empty();
+		}
+
+		// search by SID
+		if (!Strings.isNullOrEmpty(sid)) {
+			if (!WindowsAccountUtils.isWindowsUserSid(sid)) {
+				throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", sid));
+			}
+
+			return this.getOsAccountByAddr(sid, realm.get());
+		}
+
+		// search by login name
+		return this.getOsAccountByLoginName(loginName, realm.get());
+	}
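+
+	/*
+	 * Usage sketch (illustrative only): a caller that has a SID might check for
+	 * an existing account before creating one. The SID and the way the Host and
+	 * manager instance are obtained are placeholders.
+	 *
+	 *   Host host = ...; // e.g. the host of the data source being processed
+	 *   Optional<OsAccount> existing = osAccountManager
+	 *           .getWindowsOsAccount("S-1-5-21-1111-2222-3333-1001", null, null, host);
+	 *   if (!existing.isPresent()) {
+	 *       // no matching realm/account yet; the caller may create one
+	 *   }
+	 */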
+
+	/**
+	 * Adds rows to the tsk_os_account_attributes table for the given set of
+	 * attributes.
+	 *
+	 * @param account           Account for which the attributes are being added.
+	 * @param accountAttributes List of attributes to add.
+	 *
+	 * @throws TskCoreException
+	 */
+	public void addExtendedOsAccountAttributes(OsAccount account, List<OsAccountAttribute> accountAttributes) throws TskCoreException {
+
+		synchronized (account) {  // synchronized to prevent multiple threads trying to add osAccount attributes concurrently to the same osAccount.
+			db.acquireSingleUserCaseWriteLock();
+
+			try (CaseDbConnection connection = db.getConnection()) {
+				for (OsAccountAttribute accountAttribute : accountAttributes) {
+
+					String attributeInsertSQL = "INSERT INTO tsk_os_account_attributes(os_account_obj_id, host_id, source_obj_id, attribute_type_id, value_type, value_byte, value_text, value_int32, value_int64, value_double)"
+							+ " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; // NON-NLS
+
+					PreparedStatement preparedStatement = connection.getPreparedStatement(attributeInsertSQL, Statement.RETURN_GENERATED_KEYS);
+					preparedStatement.clearParameters();
+
+					preparedStatement.setLong(1, account.getId());
+					if (accountAttribute.getHostId().isPresent()) {
+						preparedStatement.setLong(2, accountAttribute.getHostId().get());
+					} else {
+						preparedStatement.setNull(2, java.sql.Types.NULL);
+					}
+					if (accountAttribute.getSourceObjectId().isPresent()) {
+						preparedStatement.setLong(3, accountAttribute.getSourceObjectId().get());
+					} else {
+						preparedStatement.setNull(3, java.sql.Types.NULL);
+					}
+
+					preparedStatement.setLong(4, accountAttribute.getAttributeType().getTypeID());
+					preparedStatement.setLong(5, accountAttribute.getAttributeType().getValueType().getType());
+
+					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) {
+						preparedStatement.setBytes(6, accountAttribute.getValueBytes());
+					} else {
+						preparedStatement.setBytes(6, null);
+					}
+
+					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
+							|| accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) {
+						preparedStatement.setString(7, accountAttribute.getValueString());
+					} else {
+						preparedStatement.setString(7, null);
+					}
+					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER) {
+						preparedStatement.setInt(8, accountAttribute.getValueInt());
+					} else {
+						preparedStatement.setNull(8, java.sql.Types.NULL);
+					}
+
+					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME
+							|| accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG) {
+						preparedStatement.setLong(9, accountAttribute.getValueLong());
+					} else {
+						preparedStatement.setNull(9, java.sql.Types.NULL);
+					}
+
+					if (accountAttribute.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) {
+						preparedStatement.setDouble(10, accountAttribute.getValueDouble());
+					} else {
+						preparedStatement.setNull(10, java.sql.Types.NULL);
+					}
+
+					connection.executeUpdate(preparedStatement);
+				}
+			} catch (SQLException ex) {
+				throw new TskCoreException(String.format("Error adding OS Account attribute for account id = %d", account.getId()), ex);
+			} finally {
+				db.releaseSingleUserCaseWriteLock();
+			}
+			// set the attribute list in account to the most current list from the database
+			List<OsAccountAttribute> currentAttribsList = getOsAccountAttributes(account);
+			account.setAttributesInternal(currentAttribsList);
+		}
+		fireChangeEvent(account);
+	}
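+
+	/*
+	 * Usage sketch (illustrative only): attributes are built by the caller and
+	 * added in a single call; the list below is a placeholder.
+	 *
+	 *   List<OsAccountAttribute> attrs = ...; // caller-constructed attributes
+	 *   osAccountManager.addExtendedOsAccountAttributes(account, attrs);
+	 *   // account now holds the refreshed attribute list from the database
+	 */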
+
+	/**
+	 * Get the OS account attributes for the given account.
+	 *
+	 * @param account Account to get the attributes for.
+	 *
+	 * @return List of attributes, may be an empty list.
+	 *
+	 * @throws TskCoreException
+	 */
+	List<OsAccountAttribute> getOsAccountAttributes(OsAccount account) throws TskCoreException {
+
+		String queryString = "SELECT attributes.os_account_obj_id as os_account_obj_id, attributes.host_id as host_id, attributes.source_obj_id as source_obj_id, "
+				+ " attributes.attribute_type_id as attribute_type_id,  attributes.value_type as value_type, attributes.value_byte as value_byte, "
+				+ " attributes.value_text as value_text, attributes.value_int32 as value_int32, attributes.value_int64 as value_int64, attributes.value_double as value_double, "
+				+ " hosts.id, hosts.name as host_name, hosts.db_status as host_status "
+				+ " FROM tsk_os_account_attributes as attributes"
+				+ "		LEFT JOIN tsk_hosts as hosts "
+				+ " ON attributes.host_id = hosts.id "
+				+ " WHERE os_account_obj_id = " + account.getId();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			List<OsAccountAttribute> attributes = new ArrayList<>();
+			while (rs.next()) {
+
+				Host host = null;
+				long hostId = rs.getLong("host_id");
+				if (!rs.wasNull()) {
+					host = new Host(hostId, rs.getString("host_name"), Host.HostDbStatus.fromID(rs.getInt("host_status")));
+				}
+
+				Content sourceContent = null;
+				long sourceObjId = rs.getLong("source_obj_id");
+				if (!rs.wasNull()) {
+					sourceContent = this.db.getContentById(sourceObjId);
+				}
+				BlackboardAttribute.Type attributeType = db.getAttributeType(rs.getInt("attribute_type_id"));
+				OsAccountAttribute attribute = account.new OsAccountAttribute(attributeType, rs.getInt("value_int32"), rs.getLong("value_int64"),
+						rs.getDouble("value_double"), rs.getString("value_text"), rs.getBytes("value_byte"),
+						db, account, host, sourceContent);
+
+				attributes.add(attribute);
+			}
+			return attributes;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting OS account attributes for account obj id = %d", account.getId()), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets the OS account instances for a given OS account.
+	 *
+	 * @param account The OS account.
+	 *
+	 * @return The OS account instances, may be an empty list.
+	 *
+	 * @throws TskCoreException
+	 */
+	List<OsAccountInstance> getOsAccountInstances(OsAccount account) throws TskCoreException {
+		String whereClause = "tsk_os_account_instances.os_account_obj_id = " + account.getId();
+		return getOsAccountInstances(whereClause);
+	}
+
+	/**
+	 * Gets the OS account instances with the given instance IDs.
+	 *
+	 * @param instanceIDs The instance IDs.
+	 *
+	 * @return The OS account instances.
+	 *
+	 * @throws TskCoreException Thrown if there is an error querying the case
+	 *                          database.
+	 */
+	public List<OsAccountInstance> getOsAccountInstances(List<Long> instanceIDs) throws TskCoreException {
+		String instanceIds = instanceIDs.stream().map(id -> id.toString()).collect(Collectors.joining(","));
+		String whereClause = "tsk_os_account_instances.id IN (" + instanceIds + ")";
+		return getOsAccountInstances(whereClause);
+	}
+
+	/**
+	 * Gets the OS account instances that satisfy the given SQL WHERE clause.
+	 *
+	 * @param whereClause The SQL WHERE clause.
+	 *
+	 * @return The OS account instances.
+	 *
+	 * @throws TskCoreException Thrown if there is an error querying the case
+	 *                          database.
+	 */
+	private List<OsAccountInstance> getOsAccountInstances(String whereClause) throws TskCoreException {
+		List<OsAccountInstance> osAcctInstances = new ArrayList<>();
+		String querySQL = "SELECT * FROM tsk_os_account_instances WHERE " + whereClause;
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection();
+				PreparedStatement preparedStatement = connection.getPreparedStatement(querySQL, Statement.NO_GENERATED_KEYS);
+				ResultSet results = connection.executeQuery(preparedStatement)) {
+			while (results.next()) {
+				long instanceId = results.getLong("id");
+				long osAccountObjID = results.getLong("os_account_obj_id");
+				long dataSourceObjId = results.getLong("data_source_obj_id");
+				int instanceType = results.getInt("instance_type");
+				osAcctInstances.add(new OsAccountInstance(db, instanceId, osAccountObjID, dataSourceObjId, OsAccountInstance.OsAccountInstanceType.fromID(instanceType)));
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException("Failed to get OsAccountInstances (SQL = " + querySQL + ")", ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+		return osAcctInstances;
+	}
+
+	/**
+	 * Updates the properties of the specified account in the database.
+	 *
+	 * A column is updated only if a non-null value has been specified.
+	 *
+	 * @param osAccount     OsAccount that needs to be updated in the database.
+	 * @param fullName      Full name, may be null.
+	 * @param accountType   Account type, may be null.
+	 * @param accountStatus Account status, may be null.
+	 * @param creationTime  Creation time, may be null.
+	 *
+	 * @return OsAccountUpdateResult Account update status, and updated account.
+	 *
+	 * @throws TskCoreException If there is a database error or if the updated
+	 *                          information conflicts with an existing account.
+	 */
+	public OsAccountUpdateResult updateStandardOsAccountAttributes(OsAccount osAccount, String fullName, OsAccountType accountType, OsAccountStatus accountStatus, Long creationTime) throws TskCoreException {
+
+		CaseDbTransaction trans = db.beginTransaction();
+		try {
+			OsAccountUpdateResult updateStatus = updateStandardOsAccountAttributes(osAccount, fullName, accountType, accountStatus, creationTime, trans);
+
+			trans.commit();
+			trans = null;
+
+			return updateStatus;
+		} finally {
+			if (trans != null) {
+				trans.rollback();
+			}
+		}
+	}
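+
+	/*
+	 * Usage sketch (illustrative only): updating the full name of an account
+	 * and picking up the refreshed object. The name value is hypothetical.
+	 *
+	 *   OsAccountUpdateResult result = osAccountManager
+	 *           .updateStandardOsAccountAttributes(account, "Jane Doe", null, null, null);
+	 *   if (result.getUpdateStatusCode() == OsAccountUpdateStatus.UPDATED) {
+	 *       account = result.getUpdatedAccount().orElse(account);
+	 *   }
+	 */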
+
+	/**
+	 * Updates the properties of the specified account in the database.
+	 *
+	 * A column is updated only if a non-null value has been specified.
+	 *
+	 * @param osAccount     OsAccount that needs to be updated in the database.
+	 * @param fullName      Full name, may be null.
+	 * @param accountType   Account type, may be null.
+	 * @param accountStatus Account status, may be null.
+	 * @param creationTime  Creation time, may be null.
+	 * @param trans         Transaction to use for database operation.
+	 *
+	 * @return OsAccountUpdateResult Account update status, and updated account.
+	 *
+	 * @throws TskCoreException If there is a database error or if the updated
+	 *                          information conflicts with an existing account.
+	 */
+	OsAccountUpdateResult updateStandardOsAccountAttributes(OsAccount osAccount, String fullName, OsAccountType accountType, OsAccountStatus accountStatus, Long creationTime, CaseDbTransaction trans) throws TskCoreException {
+
+		OsAccountUpdateStatus updateStatusCode = OsAccountUpdateStatus.NO_CHANGE;
+
+		try {
+			CaseDbConnection connection = trans.getConnection();
+
+			if (!StringUtils.isBlank(fullName)) {
+				updateAccountColumn(osAccount.getId(), "full_name", fullName, connection);
+				updateStatusCode = OsAccountUpdateStatus.UPDATED;
+			}
+
+			if (Objects.nonNull(accountType)) {
+				updateAccountColumn(osAccount.getId(), "type", accountType, connection);
+				updateStatusCode = OsAccountUpdateStatus.UPDATED;
+			}
+
+			if (Objects.nonNull(accountStatus)) {
+				updateAccountColumn(osAccount.getId(), "status", accountStatus, connection);
+				updateStatusCode = OsAccountUpdateStatus.UPDATED;
+			}
+
+			if (Objects.nonNull(creationTime)) {
+				updateAccountColumn(osAccount.getId(), "created_date", creationTime, connection);
+				updateStatusCode = OsAccountUpdateStatus.UPDATED;
+			}
+
+			// if nothing has been changed, return
+			if (updateStatusCode == OsAccountUpdateStatus.NO_CHANGE) {
+				return new OsAccountUpdateResult(updateStatusCode, null);
+			}
+
+			// get the updated account from database
+			OsAccount updatedAccount = getOsAccountByObjectId(osAccount.getId(), connection);
+
+			// register the updated account with the transaction to fire off an event
+			trans.registerChangedOsAccount(updatedAccount);
+
+			return new OsAccountUpdateResult(updateStatusCode, updatedAccount);
+
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error updating account with addr = %s, account id = %d", osAccount.getAddr().orElse("Unknown"), osAccount.getId()), ex);
+		}
+	}
+
+	/**
+	 * Updates specified column in the tsk_os_accounts table to the specified
+	 * value.
+	 *
+	 * @param <T>          Type of value - must be a String, Long or an Integer.
+	 * @param accountObjId Object id of the account to be updated.
+	 * @param colName      Name of the column to be updated.
+	 * @param colValue     New column value.
+	 * @param connection   Database connection to use.
+	 *
+	 * @throws SQLException     If there is an error updating the database.
+	 * @throws TskCoreException If the value type is not handled.
+	 */
+	private <T> void updateAccountColumn(long accountObjId, String colName, T colValue, CaseDbConnection connection) throws SQLException, TskCoreException {
+
+		String updateSQL = "UPDATE tsk_os_accounts "
+				+ " SET " + colName + " = ? "
+				+ " WHERE os_account_obj_id = ?";
+
+		db.acquireSingleUserCaseWriteLock();
+		try {
+			PreparedStatement preparedStatement = connection.getPreparedStatement(updateSQL, Statement.NO_GENERATED_KEYS);
+			preparedStatement.clearParameters();
+
+			if (Objects.isNull(colValue)) {
+				preparedStatement.setNull(1, Types.NULL); // handle null value
+			} else {
+				if (colValue instanceof String) {
+					preparedStatement.setString(1, (String) colValue);
+				} else if (colValue instanceof Long) {
+					preparedStatement.setLong(1, (Long) colValue);
+				} else if (colValue instanceof Integer) {
+					preparedStatement.setInt(1, (Integer) colValue);
+				} else {
+					throw new TskCoreException(String.format("Unhandled column data type received while updating the account (%d) ", accountObjId));
+				}
+			}
+
+			preparedStatement.setLong(2, accountObjId);
+
+			connection.executeUpdate(preparedStatement);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Updates the signature of the specified account, if the db status of the
+	 * account is active.
+	 *
+	 * @param accountObjId Object id of the account to be updated.
+	 * @param signature    New signature.
+	 * @param connection   Database connection to use.
+	 *
+	 * @throws SQLException If there is an error updating the database.
+	 */
+	private void updateAccountSignature(long accountObjId, String signature, CaseDbConnection connection) throws SQLException {
+
+		String updateSQL = "UPDATE tsk_os_accounts SET "
+				+ "		signature = "
+				+ "       CASE WHEN db_status = " + OsAccount.OsAccountDbStatus.ACTIVE.getId() + " THEN ? ELSE signature END  "
+				+ " WHERE os_account_obj_id = ?";
+
+		PreparedStatement preparedStatement = connection.getPreparedStatement(updateSQL, Statement.NO_GENERATED_KEYS);
+		preparedStatement.clearParameters();
+
+		preparedStatement.setString(1, signature);
+		preparedStatement.setLong(2, accountObjId);
+
+		connection.executeUpdate(preparedStatement);
+	}
+
+	/**
+	 * Update the address and/or login name for the specified account in the
+	 * database. Also update the realm addr/name if needed.
+	 *
+	 * A column is updated only if its current value is null and a non-null
+	 * value has been specified.
+	 *
+	 *
+	 * @param osAccount     OsAccount that needs to be updated in the database.
+	 * @param accountSid    Account SID, may be null.
+	 * @param loginName     Login name, may be null.
+	 * @param realmName     Realm name for the account.
+	 * @param referringHost Host.
+	 *
+	 * @return OsAccountUpdateResult Account update status, and the updated
+	 *         account.
+	 *
+	 * @throws TskCoreException If there is a database error or if the updated
+	 *                          information conflicts with an existing account.
+	 */
+	public OsAccountUpdateResult updateCoreWindowsOsAccountAttributes(OsAccount osAccount, String accountSid, String loginName, String realmName, Host referringHost) throws TskCoreException, NotUserSIDException {
+		CaseDbTransaction trans = db.beginTransaction();
+		try {
+			OsAccountUpdateResult updateStatus = this.updateCoreWindowsOsAccountAttributes(osAccount, accountSid, loginName, realmName, referringHost, trans);
+
+			trans.commit();
+			trans = null;
+			return updateStatus;
+		} finally {
+			if (trans != null) {
+				trans.rollback();
+			}
+		}
+	}
+
+	/**
+	 * Update the address and/or login name for the specified account in the
+	 * database. Also update the realm addr/name if needed.
+	 *
+	 * A column is updated only if its current value is null and a non-null
+	 * value has been specified.
+	 *
+	 * @param osAccount  OsAccount that needs to be updated in the database.
+	 * @param accountSid Account SID, may be null.
+	 * @param loginName  Login name, may be null.
+	 * @param realmName  Account realm name. May be null if accountSid is not
+	 *                   null.
+	 * @param referringHost Host referring the account.
+	 * @param trans      Transaction to use for database operations.
+	 *
+	 * @return OsAccountUpdateResult Account update status, and the updated
+	 *         account.
+	 *
+	 * @throws TskCoreException If there is a database error or if the updated
+	 *                          information conflicts with an existing account.
+	 */
+	private OsAccountUpdateResult updateCoreWindowsOsAccountAttributes(OsAccount osAccount, String accountSid, String loginName, String realmName, Host referringHost, CaseDbTransaction trans) throws TskCoreException, NotUserSIDException {
+
+		// first get and update the realm - if we have the info to find the realm
+		if (!StringUtils.isBlank(accountSid) || !StringUtils.isBlank(realmName)) {
+			db.getOsAccountRealmManager().getAndUpdateWindowsRealm(accountSid, realmName, referringHost, trans.getConnection());
+		}
+
+		// now update the account core data
+		OsAccountUpdateResult updateStatus = this.updateOsAccountCore(osAccount, accountSid, loginName, trans);
+
+		return updateStatus;
+	}
+
+	/**
+	 * Update the address and/or login name for the specified account in the
+	 * database.
+	 *
+	 * A column is updated only if its current value is null and a non-null
+	 * value has been specified.
+	 *
+	 *
+	 * NOTE: Will not merge accounts if the updated information conflicts with
+	 * an existing account (such as adding an ID to an account that has only a
+	 * name when another account already has that ID).
+	 *
+	 * @param osAccount OsAccount that needs to be updated in the database.
+	 * @param address   Account address, may be null.
+	 * @param loginName Login name, may be null.
+	 * @param trans     Transaction to use for database operations.
+	 *
+	 * @return OsAccountUpdateResult Account update status, and the updated
+	 *         account.
+	 *
+	 * @throws TskCoreException If there is a database error or if the updated
+	 *                          information conflicts with an existing account.
+	 */
+	private OsAccountUpdateResult updateOsAccountCore(OsAccount osAccount, String address, String loginName, CaseDbTransaction trans) throws TskCoreException {
+
+		OsAccountUpdateStatus updateStatusCode = OsAccountUpdateStatus.NO_CHANGE;
+		OsAccount updatedAccount;
+
+		try {
+			CaseDbConnection connection = trans.getConnection();
+
+			// if a new addr is provided and the account already has an address, and they are not the same, throw an exception
+			if (!StringUtils.isBlank(address) && !StringUtils.isBlank(osAccount.getAddr().orElse(null)) && !address.equalsIgnoreCase(osAccount.getAddr().orElse(""))) {
+				throw new TskCoreException(String.format("Account (%d) already has an address (%s), address cannot be updated.", osAccount.getId(), osAccount.getAddr().orElse("NULL")));
+			}
+
+			// if a new login name is provided and the account already has a loginname and they are not the same, throw an exception
+			if (!StringUtils.isBlank(loginName) && !StringUtils.isBlank(osAccount.getLoginName().orElse(null)) && !loginName.equalsIgnoreCase(osAccount.getLoginName().orElse(""))) {
+				throw new TskCoreException(String.format("Account (%d) already has a login name (%s), login name cannot be updated.", osAccount.getId(), osAccount.getLoginName().orElse("NULL")));
+			}
+
+			if (StringUtils.isBlank(osAccount.getAddr().orElse(null)) && !StringUtils.isBlank(address)) {
+				updateAccountColumn(osAccount.getId(), "addr", address, connection);
+				updateStatusCode = OsAccountUpdateStatus.UPDATED;
+			}
+
+			if (StringUtils.isBlank(osAccount.getLoginName().orElse(null)) && !StringUtils.isBlank(loginName)) {
+				updateAccountColumn(osAccount.getId(), "login_name", loginName, connection);
+				updateStatusCode = OsAccountUpdateStatus.UPDATED;
+			}
+
+			// if nothing is changed, return
+			if (updateStatusCode == OsAccountUpdateStatus.NO_CHANGE) {
+				return new OsAccountUpdateResult(updateStatusCode, osAccount);
+			}
+
+			// update signature if needed, based on the most current addr/loginName
+			OsAccount currAccount = getOsAccountByObjectId(osAccount.getId(), connection);
+			String newAddress = currAccount.getAddr().orElse(null);
+			String newLoginName = currAccount.getLoginName().orElse(null);
+
+			String newSignature = getOsAccountSignature(newAddress, newLoginName);
+			updateAccountSignature(osAccount.getId(), newSignature, connection);
+
+			// get the updated account from database
+			updatedAccount = getOsAccountByObjectId(osAccount.getId(), connection);
+
+			// register the updated account with the transaction to fire off an event
+			trans.registerChangedOsAccount(updatedAccount);
+
+			return new OsAccountUpdateResult(updateStatusCode, updatedAccount);
+
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error updating account with unique id = %s, account id = %d", osAccount.getAddr().orElse("Unknown"), osAccount.getId()), ex);
+		}
+	}
+
+	/**
+	 * Returns a list of hosts where the OsAccount has appeared.
+	 *
+	 * @param account OsAccount
+	 *
+	 * @return List of Hosts that reference the given OsAccount.
+	 *
+	 * @throws TskCoreException
+	 */
+	public List<Host> getHosts(OsAccount account) throws TskCoreException {
+		List<Host> hostList = new ArrayList<>();
+
+		String query = "SELECT tsk_hosts.id AS hostId, name, db_status FROM tsk_hosts "
+				+ " JOIN data_source_info ON tsk_hosts.id = data_source_info.host_id"
+				+ "	JOIN tsk_os_account_instances ON data_source_info.obj_id = tsk_os_account_instances.data_source_obj_id"
+				+ " WHERE os_account_obj_id = " + account.getId();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, query)) {
+
+			while (rs.next()) {
+				hostList.add(new Host(rs.getLong("hostId"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status"))));
+			}
+
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Failed to get host list for os account %d", account.getId()), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+		return hostList;
+	}
+
+	/**
+	 * Takes a result set positioned at a row of the tsk_os_accounts table and
+	 * creates an OsAccount from it.
+	 *
+	 * @param rs ResultSet positioned at a tsk_os_accounts row.
+	 *
+	 * @return OsAccount OS Account.
+	 *
+	 * @throws SQLException
+	 */
+	private OsAccount osAccountFromResultSet(ResultSet rs) throws SQLException {
+
+		OsAccountType accountType = null;
+		int typeId = rs.getInt("type");
+		if (!rs.wasNull()) {
+			accountType = OsAccount.OsAccountType.fromID(typeId);
+		}
+
+		Long creationTime = rs.getLong("created_date"); // getLong returns 0 if value is null
+		if (rs.wasNull()) {
+			creationTime = null;
+		}
+
+		return new OsAccount(db, rs.getLong("os_account_obj_id"), rs.getLong("realm_id"), rs.getString("login_name"), rs.getString("addr"),
+				rs.getString("signature"), rs.getString("full_name"), creationTime, accountType, OsAccount.OsAccountStatus.fromID(rs.getInt("status")),
+				OsAccount.OsAccountDbStatus.fromID(rs.getInt("db_status")));
+
+	}
+
+	/**
+	 * Fires an OsAccountChangeEvent for the given OsAccount. Do not call this
+	 * with an open transaction.
+	 *
+	 * @param account Updated account.
+	 */
+	private void fireChangeEvent(OsAccount account) {
+		db.fireTSKEvent(new OsAccountsUpdatedTskEvent(Collections.singletonList(account)));
+	}
+
+	/**
+	 * Creates an account signature for an OS account. The signature is used
+	 * only to prevent duplicate accounts from being created. It is set to the
+	 * uniqueId if the account has one, otherwise to the loginName.
+	 *
+	 * @param uniqueId  Unique id.
+	 * @param loginName Login name.
+	 *
+	 * @return Account signature.
+	 *
+	 * @throws TskCoreException If there is an error creating the account
+	 *                          signature.
+	 */
+	static String getOsAccountSignature(String uniqueId, String loginName) throws TskCoreException {
+		// Create a signature. 
+		String signature;
+		if (Strings.isNullOrEmpty(uniqueId) == false) {
+			signature = uniqueId;
+		} else if (Strings.isNullOrEmpty(loginName) == false) {
+			signature = loginName;
+		} else {
+			throw new TskCoreException("OS Account must have either a uniqueID or a login name.");
+		}
+		return signature;
+	}
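+
+	// For example, getOsAccountSignature("S-1-5-21-X-1001", "jdoe") returns
+	// "S-1-5-21-X-1001", getOsAccountSignature(null, "jdoe") returns "jdoe",
+	// and passing null or blank for both throws a TskCoreException.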
+
+	/**
+	 * Exception thrown if a given SID is a valid SID but is a group SID, and
+	 * not an individual user SID.
+	 */
+	public static class NotUserSIDException extends TskException {
+
+		private static final long serialVersionUID = 1L;
+
+		/**
+		 * Default constructor when error message is not available
+		 */
+		public NotUserSIDException() {
+			super("No error message available.");
+		}
+
+		/**
+		 * Create exception containing the error message
+		 *
+		 * @param msg the message
+		 */
+		public NotUserSIDException(String msg) {
+			super(msg);
+		}
+
+		/**
+		 * Create exception containing the error message and cause exception
+		 *
+		 * @param msg the message
+		 * @param ex  cause exception
+		 */
+		public NotUserSIDException(String msg, Exception ex) {
+			super(msg, ex);
+		}
+	}
+
+	/**
+	 * Status of an account update.
+	 */
+	public enum OsAccountUpdateStatus {
+
+		NO_CHANGE,	/// No change was made to the account.
+		UPDATED,	/// The account was updated.
+		MERGED		/// The account update triggered a merge.
+	}
+
+	/**
+	 * Container that encapsulates the account update status and the updated
+	 * account.
+	 */
+	public final static class OsAccountUpdateResult {
+
+		private final OsAccountUpdateStatus updateStatus;
+		private final OsAccount updatedAccount;
+
+		OsAccountUpdateResult(OsAccountUpdateStatus updateStatus, OsAccount updatedAccount) {
+			this.updateStatus = updateStatus;
+			this.updatedAccount = updatedAccount;
+		}
+
+		public OsAccountUpdateStatus getUpdateStatusCode() {
+			return updateStatus;
+		}
+
+		public Optional<OsAccount> getUpdatedAccount() {
+			return Optional.ofNullable(updatedAccount);
+		}
+	}
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealm.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealm.java
new file mode 100644
index 0000000000000000000000000000000000000000..79995728adb0cba555a38fe9f0382706839f745f
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealm.java
@@ -0,0 +1,316 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.ResourceBundle;
+import org.apache.commons.lang3.StringUtils;
+
+/**
+ * Realm encapsulates the scope of an OsAccount. An account is unique within a realm.
+ *
+ * A realm may be host scoped, say for a local standalone computer, or 
+ * domain scoped.
+ *
+ * Many times, we may learn about the existence of a realm without fully understanding
+ * it, such as when we find a Windows SID before we've parsed the registry to know if
+ * it is for the local computer or a domain. By default, a realm is created with a
+ * host-level scope and a confidence of "inferred".
+ */
+public final class OsAccountRealm {
+	
+	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
+
+	private final long id;	// row id 
+	
+	// a realm may have multiple names - for example, for a user ABCCorp\\user1 or user1@ABCcorp.com - 'ABCCorp' and 'ABCcorp.com' both refer to the same realm.
+	// currently we only support a single name; this could be expanded in the future.
+	private final String realmName; // realm name
+	
+	private final String realmAddr; // realm address
+	private String signature; // either realm address or name (if address is not known)
+	private final Host host;	// if the realm consists of a single host.  Will be null if the realm is domain scoped. 
+	private final ScopeConfidence scopeConfidence; // confidence in realm scope.
+	private final RealmDbStatus dbStatus; // Status of row in database.
+	
+	/**
+	 * Creates OsAccountRealm.
+	 * 
+	 * @param id              Row Id.
+	 * @param realmName       Realm name, may be null.
+	 * @param realmAddr       Unique numeric address for realm, may be null only
+	 *                        if realm name is not null.
+	 * @param signature       Either the address or the name.
+	 * @param host            Host if the realm is host scoped.
+	 * @param scopeConfidence Scope confidence.
+	 * @param dbStatus        Status of the realm row in the database.
+	 */
+	OsAccountRealm(long id, String realmName, String realmAddr, String signature, Host host, ScopeConfidence scopeConfidence, RealmDbStatus dbStatus) {
+		this.id = id;
+		this.realmName = realmName;
+		this.realmAddr = realmAddr;
+		this.signature = signature;
+		this.host = host;
+		this.scopeConfidence = scopeConfidence;
+		this.dbStatus = dbStatus;
+	}
+
+	/**
+	 * Get the realm row id. 
+	 * 
+	 * @return Realm id.
+	 */
+	long getRealmId() {
+		return id;
+	}
+
+	/**
+	 * Get realm names list.
+	 *
+	 * Currently we only support a single name per realm, so this list has at
+	 * most one entry; it may be empty if there is no name.
+	 *
+	 * @return List of realm names, may be empty.
+	 */
+	public List<String> getRealmNames() {
+		List<String> namesList = new ArrayList<>();
+		if (!Objects.isNull(realmName)) {
+			namesList.add(realmName);
+		}
+
+		return namesList;
+	}
+
+	/**
+	 * Get the realm address, such as part of a Windows SID. 
+	 *
+	 * @return Optional realm unique address.
+	 */
+	public Optional<String> getRealmAddr() {
+		return Optional.ofNullable(realmAddr);
+	}
+
+	/**
+	 * Get the realm signature.
+	 *
+	 * @return Realm signature.
+	 */
+	String getSignature() {
+		return signature;
+	}
+	
+	/**
+	 * Get the realm scope host, if it's a single host realm.
+	 * 
+	 * @return Optional host. Empty if the realm is domain scoped.
+	 */
+	public Optional<Host> getScopeHost() {
+		return Optional.ofNullable(host);
+	}
+
+	/**
+	 * Get realm scope confidence.
+	 * 
+	 * @return Realm scope confidence. 
+	 */
+	public ScopeConfidence getScopeConfidence() {
+		return scopeConfidence;
+	}
+	
+	/**
+	 * Get the database status of this realm.
+	 * 
+	 * @return Realm database status. 
+	 */
+	RealmDbStatus getDbStatus() {
+		return dbStatus;
+	}	
+
+	/**
+	 * Get the realm scope.
+	 * 
+	 * @return Realm scope.
+	 */
+	public RealmScope getScope() {
+		return getScopeHost().isPresent() ? RealmScope.LOCAL : RealmScope.DOMAIN; 
+	}
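+
+	// For example, a realm whose scope host is set resolves to RealmScope.LOCAL,
+	// while a realm with no scope host resolves to RealmScope.DOMAIN; UNKNOWN is
+	// never returned by this method.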
+	
+	/**
+	 * Enum to encapsulate a realm scope.
+	 *
+	 * Scope of a realm may extend to a single host (local) 
+	 * or to a domain.
+	 */
+	public enum RealmScope {
+		UNKNOWN(0,	bundle.getString("OsAccountRealm.Unknown.text")),			// realm scope is unknown.
+		LOCAL(1,	bundle.getString("OsAccountRealm.Local.text")),				// realm scope is a single host.
+		DOMAIN(2,	bundle.getString("OsAccountRealm.Domain.text"));			// realm scope is a domain.
+		
+		private final int id;
+		private final String name; 
+
+		RealmScope(int id, String name) {
+			this.id = id;
+			this.name = name;
+		}
+
+		/**
+		 * Get the id of the realm scope.
+		 * 
+		 * @return Realm scope id.
+		 */
+		public int getId() {
+			return id;
+		}
+		
+		/**
+		 * Get the realm scope name.
+		 * 
+		 * @return Realm scope name.
+		 */
+		public String getName() {
+			return name;
+		}
+		
+		/**
+		 * Gets a realm scope enum by id.
+		 *
+		 * @param typeId Realm scope id.
+		 *
+		 * @return RealmScope enum, or null if no match is found.
+		 */
+		public static RealmScope fromID(int typeId) {
+			for (RealmScope scopeType : RealmScope.values()) {
+				if (scopeType.ordinal() == typeId) {
+					return scopeType;
+				}
+			}
+			return null;
+		}
+	}
+	
+	/**
+	 * Enum to encapsulate scope confidence.
+	 *
+	 * We may know for sure that a realm is domain scoped or host scoped, based
+	 * on where it is found. Occasionally, we may have to infer or assume a
+	 * scope to initially create a realm.
+	 */
+	public enum ScopeConfidence {
+		KNOWN(0, bundle.getString("OsAccountRealm.Known.text")),			// realm scope is known for sure.
+		INFERRED(1, bundle.getString("OsAccountRealm.Inferred.text"));	// realm scope is inferred
+
+		private final int id;
+		private final String name; 
+
+		ScopeConfidence(int id, String name) {
+			this.id = id;
+			this.name = name;
+		}
+
+		/**
+		 * Get the id of the realm scope confidence.
+		 * 
+		 * @return Realm scope confidence id.
+		 */
+		public int getId() {
+			return id;
+		}
+		
+		/**
+		 * Get the realm scope confidence name.
+		 * 
+		 * @return Realm scope confidence name.
+		 */
+		public String getName() {
+			return name;
+		}
+		
+		/**
+		 * Gets a realm scope confidence enum by id. 
+		 * 
+		 * @param typeId Realm scope confidence id.
+		 * 
+		 * @return ScopeConfidence enum.
+		 */
+		public static ScopeConfidence fromID(int typeId) {
+			for (ScopeConfidence statusType : ScopeConfidence.values()) {
+				if (statusType.ordinal() == typeId) {
+					return statusType;
+				}
+			}
+			return null;
+		}
+	}
+		
+	/**
+	 * Set the signature for the account realm.
+	 *
+	 * @param signature Realm signature.
+	 *
+	 * @return Returns true if the signature was set, false if it was not
+	 *         changed.
+	 */
+	boolean setSignature(String signature) {
+		if (StringUtils.isNotBlank(signature)) {
+			this.signature = signature;
+			return true;
+		}
+		
+		return false;
+	}
+	
+	
+	/**
+	 * Encapsulates status of realm row.
+	 */
+	enum RealmDbStatus {
+		ACTIVE(0, "Active"),
+		MERGED(1, "Merged"),
+		DELETED(2, "Deleted");	
+
+		private final int id;
+		private final String name;
+
+		RealmDbStatus(int id, String name) {
+			this.id = id;
+			this.name = name;
+		}
+
+		int getId() {
+			return id;
+		}
+
+		String getName() {
+			return name;
+		}
+
+		static RealmDbStatus fromID(int typeId) {
+			for (RealmDbStatus type : RealmDbStatus.values()) {
+				if (type.ordinal() == typeId) {
+					return type;
+				}
+			}
+			return null;
+		}
+	}
+	
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealmManager.java b/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealmManager.java
new file mode 100644
index 0000000000000000000000000000000000000000..f3b3751415e00671e658fad7adef03e87590109c
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/OsAccountRealmManager.java
@@ -0,0 +1,969 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import com.google.common.base.Strings;
+import org.apache.commons.lang3.StringUtils;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.sql.Types;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.logging.Logger;
+import org.sleuthkit.datamodel.OsAccountRealm.ScopeConfidence;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
+
+
+/**
+ * Create/Retrieve/Update OS account realms. Realms represent either an individual
+ * host with local accounts or a domain. 
+ */
+public final class OsAccountRealmManager {
+
+	private static final Logger LOGGER = Logger.getLogger(OsAccountRealmManager.class.getName());
+
+	private final SleuthkitCase db;
+
+	/**
+	 * Construct an OsAccountRealmManager for the given SleuthkitCase.
+	 *
+	 * @param skCase The SleuthkitCase.
+	 */
+	OsAccountRealmManager(SleuthkitCase skCase) {
+		this.db = skCase;
+	}
+		
+	/**
+	 * Create a realm based on Windows information. The input SID is a
+	 * user/group SID. The domain SID is extracted from this incoming SID.
+	 *
+	 * @param accountSid    User/group SID. May be null only if name is not
+	 *                      null.
+	 * @param realmName     Realm name. May be null only if SID is not null.
+	 * @param referringHost Host where realm reference is found.
+	 * @param realmScope    Scope of realm. Use UNKNOWN if you are not sure and
+	 *                      the method will try to detect the correct scope.
+	 *
+	 * @return OsAccountRealm.
+	 *
+	 * @throws TskCoreException                     If there is an error
+	 *                                              creating the realm.
+	 * @throws OsAccountManager.NotUserSIDException If the SID is not a user
+	 *                                              SID.
+	 */
+	public OsAccountRealm newWindowsRealm(String accountSid, String realmName, Host referringHost, OsAccountRealm.RealmScope realmScope) throws TskCoreException, OsAccountManager.NotUserSIDException {
+
+		if (realmScope == null) {
+			throw new TskCoreException("RealmScope cannot be null. Use UNKNOWN if scope is not known.");
+		}
+		if (referringHost == null) {
+			throw new TskCoreException("A referring host is required to create a realm.");
+		}
+		if (StringUtils.isBlank(accountSid) && StringUtils.isBlank(realmName)) {
+			throw new TskCoreException("Either an address or a name is required to create a realm.");
+		}
+		
+		Host scopeHost;
+		OsAccountRealm.ScopeConfidence scopeConfidence;
+		
+		switch (realmScope) {
+			case DOMAIN:
+				scopeHost = null;
+				scopeConfidence = OsAccountRealm.ScopeConfidence.KNOWN;
+				break;
+			case LOCAL:
+				scopeHost = referringHost;
+				scopeConfidence = OsAccountRealm.ScopeConfidence.KNOWN;
+				break;
+
+			case UNKNOWN:
+			default:
+				// check if the referring host already has a realm
+				boolean isHostRealmKnown = isHostRealmKnown(referringHost);
+				if (isHostRealmKnown) {
+					scopeHost = null;	// the realm does not scope to the referring host since it already has one.
+					scopeConfidence = OsAccountRealm.ScopeConfidence.KNOWN;
+				} else {
+					scopeHost = referringHost;
+					scopeConfidence = OsAccountRealm.ScopeConfidence.INFERRED;
+				}
+				break;
+
+		}
+		
+		// get windows realm address from sid
+		String realmAddr = null;
+		if (!Strings.isNullOrEmpty(accountSid)) {
+			
+			if (!WindowsAccountUtils.isWindowsUserSid(accountSid)) {
+				throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", accountSid ));
+			}
+			
+			realmAddr = WindowsAccountUtils.getWindowsRealmAddress(accountSid);
+			
+			// if the account is a special Windows account, create a local realm for it.
+			if (realmAddr.equals(WindowsAccountUtils.SPECIAL_WINDOWS_REALM_ADDR)) {
+				scopeHost = referringHost;
+				scopeConfidence = OsAccountRealm.ScopeConfidence.KNOWN;
+			}
+		}
+		
+		String signature = makeRealmSignature(realmAddr, realmName, scopeHost);
+		
+		// create a realm
+		return newRealm(realmName, realmAddr, signature, scopeHost, scopeConfidence);
+	}
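+
+	/*
+	 * Usage sketch (illustrative only; the SID, domain name, and host are
+	 * hypothetical values):
+	 *
+	 *   OsAccountRealm realm = realmManager.newWindowsRealm(
+	 *           "S-1-5-21-1111-2222-3333-1001", "CORP", host,
+	 *           OsAccountRealm.RealmScope.UNKNOWN);
+	 *
+	 * With UNKNOWN scope, the realm is created host scoped with INFERRED
+	 * confidence, unless the referring host already has a known realm or the
+	 * SID maps to a special Windows account, in which case the scope
+	 * confidence is KNOWN.
+	 */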
+	
+	/**
+	 * Get a Windows realm by the account SID or the domain name. The input SID
+	 * is a user/group account SID. The domain SID is extracted from this
+	 * incoming SID.
+	 *
+	 * @param accountSid    Account SID, may be null.
+	 * @param realmName     Realm name, may be null only if accountSid is not
+	 *                      null.
+	 * @param referringHost Referring Host.
+	 *
+	 * @return Optional with OsAccountRealm, Optional.empty if no matching realm
+	 *         is found.
+	 *
+	 * @throws TskCoreException
+	 * @throws OsAccountManager.NotUserSIDException If the SID is not a user
+	 *                                              SID.
+	 */
+	public Optional<OsAccountRealm> getWindowsRealm(String accountSid, String realmName, Host referringHost) throws TskCoreException, OsAccountManager.NotUserSIDException {
+		
+		if (referringHost == null) {
+			throw new TskCoreException("A referring host is required to get a realm.");
+		}
+		
+		// need at least one of the two, the addr or name to look up
+		if (Strings.isNullOrEmpty(accountSid) && Strings.isNullOrEmpty(realmName)) {
+			throw new TskCoreException("Realm address or name is required to get a realm.");
+		}
+		
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			return getWindowsRealm(accountSid, realmName, referringHost, connection);
+		}
+	}
+	
+	
+	/**
+	 * Get a Windows realm by the account SID or the domain name.
+	 * The input SID is a user/group account SID. The domain SID is extracted from this incoming SID.
+	 * 
+	 * @param accountSid    Account SID, may be null.
+	 * @param realmName     Realm name, may be null only if accountSid is not
+	 *                      null.
+	 * @param referringHost Referring Host.
+	 * @param connection    Database connection to use.
+	 * 
+	 * @return Optional with OsAccountRealm, Optional.empty if no matching realm is found.
+	 * 
+	 * @throws TskCoreException
+	 */
+	Optional<OsAccountRealm> getWindowsRealm(String accountSid, String realmName, Host referringHost, CaseDbConnection connection) throws TskCoreException, OsAccountManager.NotUserSIDException {
+		
+		if (referringHost == null) {
+			throw new TskCoreException("A referring host is required to get a realm.");
+		}
+		
+		// need at least one of the two, the addr or name to look up
+		if (StringUtils.isBlank(accountSid) && StringUtils.isBlank(realmName)) {
+			throw new TskCoreException("Realm address or name is required to get a realm.");
+		}
+		
+		// If an accountSID is provided search for realm by addr.
+		if (!Strings.isNullOrEmpty(accountSid)) {
+			
+			if (!WindowsAccountUtils.isWindowsUserSid(accountSid)) {
+				throw new OsAccountManager.NotUserSIDException(String.format("SID = %s is not a user SID.", accountSid ));
+			}
+			// get realm addr from the account SID.
+			String realmAddr = WindowsAccountUtils.getWindowsRealmAddress(accountSid);
+			Optional<OsAccountRealm> realm = getRealmByAddr(realmAddr, referringHost, connection);
+			if (realm.isPresent()) {
+				return realm;
+			}
+		}
+
+		// No realm addr so search by name.
+		Optional<OsAccountRealm> realm = getRealmByName(realmName, referringHost, connection);
+		if (realm.isPresent() && !Strings.isNullOrEmpty(accountSid)) {
+			// If we were given an accountSID, make sure there isn't one set on the matching realm.
+			// We know it won't match because the previous search by SID failed.
+			if (realm.get().getRealmAddr().isPresent()) {
+				return Optional.empty();
+			}
+		}
+		return realm;
+	}
+	
+	
+	/**
+	 * Get a Windows realm by the account SID or the domain name. The input SID
+	 * is a user/group account SID. The domain SID is extracted from this
+	 * incoming SID.
+	 *
+	 * If a realm is found but is missing either the SID or the realmName, then
+	 * the realm is updated.
+	 *
+	 * @param accountSid    Account SID, may be null.
+	 * @param realmName     Realm name, may be null only if accountSid is not
+	 *                      null.
+	 * @param referringHost Referring Host.
+	 * @param connection    Database connection to use.
+	 *
+	 * @return Optional with OsAccountRealm, Optional.empty if no matching realm
+	 *         is found.
+	 *
+	 * @throws TskCoreException
+	 */
+	Optional<OsAccountRealm> getAndUpdateWindowsRealm(String accountSid, String realmName, Host referringHost, CaseDbConnection connection) throws TskCoreException, OsAccountManager.NotUserSIDException {
+		
+		// get realm
+		Optional<OsAccountRealm> realmOptional =  getWindowsRealm(accountSid, realmName, referringHost, connection );
+		
+		// if found, update it if needed
+		if (realmOptional.isPresent()) {
+			String realmAddr = StringUtils.isNotBlank(accountSid) ? WindowsAccountUtils.getWindowsRealmAddress(accountSid) : null;
+			OsRealmUpdateResult realmUpdateResult = updateRealm(realmOptional.get(), realmAddr, realmName, connection);
+			
+			// if realm was updated, return the updated realm
+			if (realmUpdateResult.getUpdateStatus() == OsRealmUpdateStatus.UPDATED) {
+				return realmUpdateResult.getUpdatedRealm();
+			} 
+		} 
+		
+		return realmOptional; // return the found realm as is, if any
+	}
+	
+	
+	/**
+	 * Updates the realm address and/or name, if a non-blank address/name is
+	 * specified and the current address/name is blank.
+	 *
+	 * NOTE: This will not merge two realms if the updated information exists
+	 * for another realm (e.g., adding an address to a realm that has only a
+	 * name when there is already a realm with that address).
+	 *
+	 * @param realm      Realm to update.
+	 * @param realmAddr  Realm address, may be null if the address doesn't need
+	 *                   to be updated.
+	 * @param realmName  Realm name, may be null if the name doesn't need to be
+	 *                   updated.
+	 * 
+	 * @return OsRealmUpdateResult Update status and updated realm.
+	 * 
+	 * @throws TskCoreException If there is a database error or if a realm
+     * already exists with that information. 
+	 */
+	public OsRealmUpdateResult updateRealm(OsAccountRealm realm, String realmAddr, String realmName) throws TskCoreException {
+		
+		try (CaseDbConnection connection = db.getConnection())  {
+			return updateRealm(realm, realmAddr, realmName, connection);
+		}
+	}
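+
+	// Illustrative example (hypothetical realm and SID values) of using the update API above
+	// together with OsRealmUpdateResult, which is defined at the bottom of this class:
+	//
+	//   OsRealmUpdateResult result = db.getOsAccountRealmManager()
+	//           .updateRealm(realm, "S-1-5-21-1111111111-2222222222-3333333333", null);
+	//   if (result.getUpdateStatus() == OsRealmUpdateStatus.UPDATED) {
+	//       OsAccountRealm updated = result.getUpdatedRealm().orElse(realm);
+	//   }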
+		
+	/**
+	 * Updates the realm address and/or name, if a non-blank address/name is
+	 * specified and the current address/name is blank.
+	 *
+	 * @param realm      Realm to update.
+	 * @param realmAddr  Realm address, may be null if the address doesn't need
+	 *                   to be updated.
+	 * @param realmName  Realm name, may be null if the name doesn't need to be
+	 *                   updated.
+	 * @param connection Current database connection.
+	 *
+	 * @return OsRealmUpdateResult Update status and updated realm.
+	 *
+	 * @throws TskCoreException If there is a database error or if a realm
+	 *                          already exists with that information.
+	 */
+	private OsRealmUpdateResult updateRealm(OsAccountRealm realm, String realmAddr, String realmName, CaseDbConnection connection) throws TskCoreException {
+
+		// need at least one of the two
+		if (StringUtils.isBlank(realmAddr) && StringUtils.isBlank(realmName)) {
+			throw new TskCoreException("Realm address or name is required to update realm.");
+		}
+
+		OsRealmUpdateStatus updateStatusCode = OsRealmUpdateStatus.NO_CHANGE;
+		OsAccountRealm updatedRealm = null;
+
+		db.acquireSingleUserCaseWriteLock();
+		try {
+			List<String> realmNames = realm.getRealmNames();
+			String currRealmName = realmNames.isEmpty() ? null : realmNames.get(0);	// currently there is only one name.
+			String currRealmAddr = realm.getRealmAddr().orElse(null);
+
+			// set name and address to new values only if the current value is blank and the new value isn't.		
+			if ((StringUtils.isBlank(currRealmAddr) && StringUtils.isNotBlank(realmAddr))) {
+				updateRealmColumn(realm.getRealmId(), "realm_addr", realmAddr, connection);
+				updateStatusCode = OsRealmUpdateStatus.UPDATED;
+			}
+
+			if (StringUtils.isBlank(currRealmName) && StringUtils.isNotBlank(realmName)) {
+				updateRealmColumn(realm.getRealmId(), "realm_name", realmName, connection);
+				updateStatusCode = OsRealmUpdateStatus.UPDATED;
+			}
+
+			// if nothing is to be changed, return
+			if (updateStatusCode == OsRealmUpdateStatus.NO_CHANGE) {
+				return new OsRealmUpdateResult(updateStatusCode, realm);
+			}
+
+			// update realm signature - based on the most current address and name
+			OsAccountRealm currRealm = getRealmByRealmId(realm.getRealmId(), connection);
+			String newRealmAddr = currRealm.getRealmAddr().orElse(null);
+			String newRealmName = (currRealm.getRealmNames().isEmpty() == false) ? currRealm.getRealmNames().get(0) : null;
+
+			// make new signature
+			String newSignature = makeRealmSignature(newRealmAddr, newRealmName, realm.getScopeHost().orElse(null));
+
+			// Only update the signature for active realms; non-active (e.g. merged) realms keep their existing signature.
+			String updateSQL = "UPDATE tsk_os_account_realms SET  "
+					+ " realm_signature = "
+					+ "   CASE WHEN db_status = " + OsAccountRealm.RealmDbStatus.ACTIVE.getId() + " THEN ? ELSE realm_signature END "
+					+ " WHERE id = ?";
+			PreparedStatement preparedStatement = connection.getPreparedStatement(updateSQL, Statement.NO_GENERATED_KEYS);
+			preparedStatement.clearParameters();
+
+			preparedStatement.setString(1, newSignature); // Is only set for active accounts
+			preparedStatement.setLong(2, realm.getRealmId());
+			connection.executeUpdate(preparedStatement);
+
+			// read the updated realm
+			updatedRealm = this.getRealmByRealmId(realm.getRealmId(), connection);
+
+			return new OsRealmUpdateResult(updateStatusCode, updatedRealm);
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error updating realm with id = %d, name = %s, addr = %s", realm.getRealmId(), realmName != null ? realmName : "Null", realm.getRealmAddr().orElse("Null")), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+
+	}
+
+	/**
+	 * Updates the specified column in the tsk_os_account_realms table to the
+	 * specified value.
+	 *
+	 * @param <T> Type of value - must be a String, a Long, or an Integer.
+	 * @param realmId Id of the realm to be updated.
+	 * @param colName Name of the column to be updated.
+	 * @param colValue New column value. 
+	 * @param connection Database connection to use.
+	 * 
+	 * @throws SQLException If there is an error updating the database.
+	 * @throws TskCoreException  If the value type is not handled.
+	 */
+	private <T> void updateRealmColumn(long realmId, String colName, T colValue, CaseDbConnection connection) throws SQLException, TskCoreException {
+
+		String updateSQL = "UPDATE tsk_os_account_realms "
+				+ " SET " + colName + " = ? "
+				+ " WHERE id = ?";
+
+		db.acquireSingleUserCaseWriteLock();
+		try {
+			PreparedStatement preparedStatement = connection.getPreparedStatement(updateSQL, Statement.NO_GENERATED_KEYS);
+			preparedStatement.clearParameters();
+
+			if (Objects.isNull(colValue)) {
+				preparedStatement.setNull(1, Types.NULL); // handle null value
+			} else {
+				if (colValue instanceof String) {
+					preparedStatement.setString(1, (String) colValue);
+				} else if (colValue instanceof Long) {
+					preparedStatement.setLong(1, (Long) colValue);
+				} else if (colValue instanceof Integer) {
+					preparedStatement.setInt(1, (Integer) colValue);
+				} else {
+					throw new TskCoreException(String.format("Unhandled column data type received while updating the realm (id = %d) ", realmId));
+				}
+			}
+
+			preparedStatement.setLong(2, realmId);
+
+			connection.executeUpdate(preparedStatement);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
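+
+	// For example (illustrative values only), filling in just the name column for a realm row:
+	//   updateRealmColumn(realmId, "realm_name", "CORP", connection);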
+	
+	private final static String REALM_QUERY_STRING = "SELECT realms.id as realm_id, realms.realm_name as realm_name,"
+			+ " realms.realm_addr as realm_addr, realms.realm_signature as realm_signature, realms.scope_host_id, realms.scope_confidence, realms.db_status,"
+			+ " hosts.id, hosts.name as host_name "
+			+ " FROM tsk_os_account_realms as realms"
+			+ "		LEFT JOIN tsk_hosts as hosts"
+			+ " ON realms.scope_host_id = hosts.id";
+	
+	/**
+	 * Gets the realm for the given row id.
+	 *
+	 * @param id Realm row id.
+	 *
+	 * @return Realm.
+	 *
+	 * @throws TskCoreException on error.
+	 */
+	public OsAccountRealm getRealmByRealmId(long id) throws TskCoreException {
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			return getRealmByRealmId(id, connection);
+		}
+	}
+	
+	/**
+	 * Gets the realm for the given row id.
+	 * 
+	 * @param id Realm row id.
+	 * @param connection Database connection to use.
+	 * 
+	 * @return Realm. 
+	 * @throws TskCoreException 
+	 */
+	OsAccountRealm getRealmByRealmId(long id, CaseDbConnection connection) throws TskCoreException {
+		
+		String queryString = REALM_QUERY_STRING
+					+ " WHERE realms.id = " + id;
+		
+		db.acquireSingleUserCaseReadLock();
+		try (	Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+			OsAccountRealm accountRealm = null;
+			if (rs.next()) { 
+				accountRealm = resultSetToAccountRealm(rs);
+			} else {
+				throw new TskCoreException(String.format("No realm found with id = %d", id));
+			}
+
+			return accountRealm;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error running the realms query = %s", queryString), ex);
+		}
+		finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+	
+	/**
+	 * Get the realm with the given realm address.
+	 * 
+	 * @param realmAddr Realm address.
+	 * @param host Host for realm, may be null.
+	 * @param connection Database connection to use.
+	 * 
+	 * @return Optional with OsAccountRealm, Optional.empty if no realm is found
+	 *         with a matching realm address.
+	 *
+	 * @throws TskCoreException If there is an error querying the realms table.
+	 */
+	Optional<OsAccountRealm> getRealmByAddr(String realmAddr, Host host, CaseDbConnection connection) throws TskCoreException {
+		
+		// If a host is specified, we want to match the realm with matching addr and specified host, or a realm with matching addr and no host.
+		// If no host is specified, then we return the first realm with matching addr.
+		String whereHostClause = (host == null) 
+							? " 1 = 1 " 
+							: " ( realms.scope_host_id = " + host.getHostId() + " OR realms.scope_host_id IS NULL) ";
+		String queryString = REALM_QUERY_STRING
+						+ " WHERE LOWER(realms.realm_addr) = LOWER('"+ realmAddr + "') "
+						+ " AND " + whereHostClause
+				        + " AND realms.db_status = " + OsAccountRealm.RealmDbStatus.ACTIVE.getId()
+						+ " ORDER BY realms.scope_host_id IS NOT NULL, realms.scope_host_id";	// ensure that non null host_id is at the front
+				    
+		db.acquireSingleUserCaseReadLock();
+		try (	Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			OsAccountRealm accountRealm = null;
+			if (rs.next()) {
+				Host realmHost = null;
+				long hostId = rs.getLong("scope_host_id");
+				if (!rs.wasNull()) {
+					if (host != null ) {
+						realmHost = host; // exact match on given host
+					} else {
+						realmHost = new Host(hostId, rs.getString("host_name"));
+					}
+				}
+				
+				accountRealm = new OsAccountRealm(rs.getLong("realm_id"), rs.getString("realm_name"), 
+												rs.getString("realm_addr"), rs.getString("realm_signature"), 
+												realmHost, ScopeConfidence.fromID(rs.getInt("scope_confidence")),
+												OsAccountRealm.RealmDbStatus.fromID(rs.getInt("db_status")));
+			} 
+			return Optional.ofNullable(accountRealm);
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error running the realms query = %s with realmaddr = %s and host name = %s",
+					queryString, realmAddr, (host != null ? host.getName() : "Null")), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+	
+	/**
+	 * Get the realm with the given name and specified host.
+	 * 
+	 * @param realmName Realm name.
+	 * @param host Host for realm, may be null.
+	 * @param connection Database connection to use.
+	 * 
+	 * @return Optional with OsAccountRealm, Optional.empty if no matching realm is found.
+	 * @throws TskCoreException If there is an error querying the realms table.
+	 */
+	Optional<OsAccountRealm> getRealmByName(String realmName, Host host, CaseDbConnection connection) throws TskCoreException {
+		
+		// If a host is specified, we want to match the realm with matching name and specified host, or a realm with matching name and no host.
+		// If no host is specified, then we return the first realm with matching name.
+		String whereHostClause = (host == null)
+				? " 1 = 1 "
+				: " ( realms.scope_host_id = " + host.getHostId() + " OR realms.scope_host_id IS NULL ) ";
+		String queryString = REALM_QUERY_STRING
+				+ " WHERE LOWER(realms.realm_name) = LOWER('" + realmName + "')"
+				+ " AND " + whereHostClause
+				+ " AND realms.db_status = " + OsAccountRealm.RealmDbStatus.ACTIVE.getId()
+				+ " ORDER BY realms.scope_host_id IS NOT NULL, realms.scope_host_id";	// ensure that non null host_id are at the front
+
+		db.acquireSingleUserCaseReadLock();
+		try (Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+			
+			OsAccountRealm accountRealm = null;
+			if (rs.next()) {
+				Host realmHost = null;
+				long hostId = rs.getLong("scope_host_id");
+				if (!rs.wasNull()) {
+					if (host != null ) {
+						realmHost = host;
+					} else {
+						realmHost = new Host(hostId, rs.getString("host_name"));
+					}
+				}
+				
+				accountRealm = new OsAccountRealm(rs.getLong("realm_id"), rs.getString("realm_name"), 
+												rs.getString("realm_addr"), rs.getString("realm_signature"), 
+												realmHost, ScopeConfidence.fromID(rs.getInt("scope_confidence")),
+												OsAccountRealm.RealmDbStatus.fromID(rs.getInt("db_status")));
+				
+			} 
+			return Optional.ofNullable(accountRealm);
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting account realm with name = %s", realmName), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+	
+	/**
+	 * Checks if there is any realm with host scope and KNOWN confidence for the
+	 * given host. If we can assume that a host will have only a single
+	 * host-scoped realm, then a new realm can be assumed to be domain-scoped
+	 * when this method returns true, i.e. once the host-scoped realm is known,
+	 * everything else is domain-scoped.
+	 *
+	 * @param host Host for which to look for a realm.
+	 *
+	 * @return True if there exists a realm with host scope matching the given
+	 *         host, false otherwise.
+	 *
+	 * @throws TskCoreException If there is an error querying the realms table.
+	 */
+	private boolean isHostRealmKnown(Host host) throws TskCoreException {
+	
+		// check if this host already has a known local realm, other than the special Windows realm.
+		String queryString = REALM_QUERY_STRING
+				+ " WHERE realms.scope_host_id = " + host.getHostId()
+				+ " AND realms.scope_confidence = " + OsAccountRealm.ScopeConfidence.KNOWN.getId()
+				+ " AND realms.db_status = " + OsAccountRealm.RealmDbStatus.ACTIVE.getId()
+				+ " AND LOWER(realms.realm_addr) <> LOWER('"+ WindowsAccountUtils.SPECIAL_WINDOWS_REALM_ADDR + "') ";
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+			
+			// return true if there is any match.
+			return rs.next();
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting account realm for host = %s", host.getName()), ex);
+		}
+		finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+
+	}
+
+	/**
+	 * Creates an OsAccountRealm from the result set of a REALM_QUERY_STRING
+	 * query.
+	 *
+	 * @param rs ResultSet positioned at a row of the query results.
+	 *
+	 * @return The OsAccountRealm built from the current row.
+	 *
+	 * @throws SQLException If there is an error reading the result set.
+	 */
+	private OsAccountRealm resultSetToAccountRealm(ResultSet rs) throws SQLException {
+		
+		long hostId = rs.getLong("scope_host_id");
+		Host realmHost = null;
+		if (!rs.wasNull()) {
+			realmHost = new Host(hostId, rs.getString("host_name"));
+		}
+
+		return new OsAccountRealm(rs.getLong("realm_id"), rs.getString("realm_name"), 
+												rs.getString("realm_addr"), rs.getString("realm_signature"), 
+												realmHost, ScopeConfidence.fromID(rs.getInt("scope_confidence")),
+												OsAccountRealm.RealmDbStatus.fromID(rs.getInt("db_status")));
+	}
+	
+//	/**
+//	 * Get all realms.
+//	 * 
+//	 * @return Collection of OsAccountRealm
+//	 */
+//	Collection<OsAccountRealm> getRealms() throws TskCoreException {
+//		String queryString = "SELECT realms.id as realm_id, realms.realm_name as realm_name, realms.realm_addr as realm_addr, realms.scope_host_id, realms.scope_confidence, "
+//				+ " hosts.id, hosts.name as host_name "
+//				+ " FROM tsk_os_account_realms as realms"
+//				+ "		LEFT JOIN tsk_hosts as hosts"
+//				+ " ON realms.scope_host_id = hosts.id";
+//
+//		db.acquireSingleUserCaseReadLock();
+//		try (CaseDbConnection connection = this.db.getConnection();
+//				Statement s = connection.createStatement();
+//				ResultSet rs = connection.executeQuery(s, queryString)) {
+//
+//			ArrayList<OsAccountRealm> accountRealms = new ArrayList<>();
+//			while (rs.next()) {
+//				long hostId = rs.getLong("scope_host_id");
+//				Host host = null;
+//				if (!rs.wasNull()) {
+//					host = new Host(hostId, rs.getString("host_name"));
+//				}
+//
+//				accountRealms.add(new OsAccountRealm(rs.getLong("realm_id"), rs.getString("realm_name"),
+//						ScopeConfidence.fromID(rs.getInt("scope_confidence")),
+//						rs.getString("realm_addr"), host));
+//			}
+//
+//			return accountRealms;
+//		} catch (SQLException ex) {
+//			throw new TskCoreException(String.format("Error running the realms query = %s", queryString), ex);
+//		}
+//		finally {
+//			db.releaseSingleUserCaseReadLock();
+//		}
+//	}
+	
+	
+	/**
+	 * Adds a row to the realms table.
+	 * 
+	 * If the add fails, it tries to get the realm, in case the realm already exists.
+	 *
+	 * @param realmName       Realm name, may be null.
+	 * @param realmAddr       SID or some other identifier. May be null if name
+	 *                        is not null.
+	 * @param signature       Signature, either the address or the name.
+	 * @param host            Host, if the realm is host scoped. Can be null if
+	 *                        the realm is domain scoped.
+	 * @param scopeConfidence Confidence in realm scope.
+	 *
+	 * @return OsAccountRealm Realm just created.
+	 *
+	 * @throws TskCoreException If there is an internal error.
+	 */
+	private OsAccountRealm newRealm(String realmName, String realmAddr, String signature, Host host, OsAccountRealm.ScopeConfidence scopeConfidence) throws TskCoreException {
+
+		db.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			String realmInsertSQL = "INSERT INTO tsk_os_account_realms(realm_name, realm_addr, realm_signature, scope_host_id, scope_confidence)"
+					+ " VALUES (?, ?, ?, ?, ?)"; // NON-NLS
+
+			PreparedStatement preparedStatement = connection.getPreparedStatement(realmInsertSQL, Statement.RETURN_GENERATED_KEYS);
+			preparedStatement.clearParameters();
+
+			preparedStatement.setString(1, realmName);
+			preparedStatement.setString(2, realmAddr);
+			preparedStatement.setString(3, signature);
+			if (host != null) {
+				preparedStatement.setLong(4, host.getHostId());
+			} else {
+				preparedStatement.setNull(4, java.sql.Types.BIGINT);
+			}
+			preparedStatement.setInt(5, scopeConfidence.getId());
+
+			connection.executeUpdate(preparedStatement);
+
+			// Read back the row id
+			try (ResultSet resultSet = preparedStatement.getGeneratedKeys();) {
+				long rowId = resultSet.getLong(1); // last_insert_rowid()
+				return new OsAccountRealm(rowId, realmName, realmAddr, signature, host, scopeConfidence, OsAccountRealm.RealmDbStatus.ACTIVE);
+			}
+
+		} catch (SQLException ex) {
+			// Create may have failed if the realm already exists. Try and get the matching realm 
+			try (CaseDbConnection connection = this.db.getConnection()) {
+				if (!Strings.isNullOrEmpty(realmAddr)) {
+					Optional<OsAccountRealm> accountRealm = this.getRealmByAddr(realmAddr, host, connection);
+					if (accountRealm.isPresent()) {
+						return accountRealm.get();
+					}
+				} else if (!Strings.isNullOrEmpty(realmName)) {
+					Optional<OsAccountRealm> accountRealm = this.getRealmByName(realmName, host, connection);
+					if (accountRealm.isPresent()) {
+						return accountRealm.get();
+					}
+				}
+
+				// some other failure - throw an exception
+				throw new TskCoreException(String.format("Error creating realm with address = %s and name = %s, with host = %s",
+						realmAddr != null ? realmAddr : "", realmName != null ? realmName : "", host != null ? host.getName() : ""), ex);
+			}
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
+	
+
+	/**
+	 * Makes a realm signature based on the given realm address, name, and scope
+	 * host.
+	 *
+	 * The signature exists primarily to provide uniqueness in the database.
+	 * 
+	 * Signature is built as:
+	 *  (addr|name)_(hostId|"DOMAIN")
+	 *
+	 * @param realmAddr Realm address, may be null.
+	 * @param realmName Realm name, may be null only if address is not null.
+	 * @param scopeHost Realm scope host. May be null.
+	 * 
+	 * @return Realm Signature.
+	 * 
+	 * @throws TskCoreException If there is an error making the signature.
+	 */
+	static String makeRealmSignature(String realmAddr, String realmName, Host scopeHost) throws TskCoreException {
+
+		// need at least one of the two: the addr or the name
+		if (Strings.isNullOrEmpty(realmAddr) && Strings.isNullOrEmpty(realmName)) {
+			throw new TskCoreException("Realm address and name can't both be null.");
+		}
+		
+		String signature = String.format("%s_%s", !Strings.isNullOrEmpty(realmAddr) ?  realmAddr : realmName,
+												scopeHost != null ? scopeHost.getHostId() : "DOMAIN");
+		return signature;
+	}
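+
+	// Worked examples of the signature format above (hypothetical addr/name/host values):
+	//   addr "S-1-5-21-1111", scope host with id 3 (name ignored)  ->  "S-1-5-21-1111_3"
+	//   no addr, name "WORKGROUP", no scope host                   ->  "WORKGROUP_DOMAIN"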
+	
+	/**
+	 * Create a random signature for realms that have been merged.
+	 * 
+	 * @return The random signature.
+	 */
+	private String makeMergedRealmSignature() {
+		return "MERGED " +  UUID.randomUUID().toString();
+	}
+	
+	
+	/**
+	 * Move source realm into the destination host or merge with an existing realm.
+	 * 
+	 * @param sourceRealm The source realm.
+	 * @param destHost    The destination host.
+	 * @param trans       The open transaction.
+	 * @throws TskCoreException 
+	 */
+	void moveOrMergeRealm(OsAccountRealm sourceRealm, Host destHost, CaseDbTransaction trans) throws TskCoreException {
+		// Look for a matching realm by address
+		Optional<OsAccountRealm> optDestRealmAddr = Optional.empty();
+		if (sourceRealm.getRealmAddr().isPresent()) {
+			optDestRealmAddr = db.getOsAccountRealmManager().getRealmByAddr(sourceRealm.getRealmAddr().get(), destHost, trans.getConnection());
+		}
+		
+		// Look for a matching realm by name
+		Optional<OsAccountRealm> optDestRealmName = Optional.empty();
+		if (!sourceRealm.getRealmNames().isEmpty()) {
+			optDestRealmName = db.getOsAccountRealmManager().getRealmByName(sourceRealm.getRealmNames().get(0), destHost, trans.getConnection());
+		}
+		
+		// Decide how to proceed:
+		// - If we only got one match:
+		// -- If the address matched, set destRealm to the matching address realm
+		// -- If the name matched but the original and the matching realm have different addresses, leave destRealm null (it'll be a move)
+		// -- If the name matched and at least one of the address fields was null, set destRealm to the matching name realm
+		// - If we got no matches, leave destRealm null (we'll do a move not a merge)
+		// - If we got two of the same matches, set destRealm to that realm
+		// - If we got two different matches:
+		// -- If the name match has no address set, merge the matching name realm into the matching address realm, then
+		//        set destRealm to the matching address realm
+		// -- Otherwise we're in the case where the addresses are different. We will consider the address the 
+		//        stronger match and set destRealm to the matching address realm and leave the matching name realm as-is.		
+		OsAccountRealm destRealm = null;
+		if (optDestRealmAddr.isPresent() && optDestRealmName.isPresent()) {
+			if (optDestRealmAddr.get().getRealmId() == optDestRealmName.get().getRealmId()) {
+				// The two matches are the same
+				destRealm = optDestRealmAddr.get();
+			} else {
+				if (optDestRealmName.get().getRealmAddr().isPresent()) {
+					// The addresses are different, so use the one with the matching address
+					destRealm = optDestRealmAddr.get();
+				} else {
+					// Merge the realm with the matching name into the realm with the matching address.
+					// Reload from database afterward to make sure everything is up-to-date.
+					mergeRealms(optDestRealmName.get(), optDestRealmAddr.get(), trans);
+					destRealm = getRealmByRealmId(optDestRealmAddr.get().getRealmId(), trans.getConnection());
+				}
+			}
+		} else if (optDestRealmAddr.isPresent()) {
+			// Only address matched - use it
+			destRealm = optDestRealmAddr.get();
+		} else if (optDestRealmName.isPresent()) {
+			// Only name matched - check whether both have addresses set.
+			// Due to earlier checks we know the address fields can't be the same, so
+			// don't do anything if both have addresses - we consider the address to be a stronger identifier than the name
+			if (! (optDestRealmName.get().getRealmAddr().isPresent() && sourceRealm.getRealmAddr().isPresent())) {
+				destRealm = optDestRealmName.get();
+			}
+		}
+		
+		// Move or merge the source realm
+		if (destRealm == null) {
+			moveRealm(sourceRealm, destHost, trans);
+		} else {
+			mergeRealms(sourceRealm, destRealm, trans);
+		}
+	}
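+
+	// Illustrative outcomes of the matching rules above (hypothetical realms and hosts):
+	//   - The source realm's addr also exists on a realm in destHost                -> merge source into that realm.
+	//   - No realm in destHost matches the source realm's addr or name              -> move source realm to destHost.
+	//   - Only the name matches and both realms have different, non-null addresses  -> move source realm; both realms are kept.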
+	
+	/**
+	 * Move a realm to a different host.
+	 * A check should be done to make sure there are no matching realms in
+	 * the destination host before calling this method.
+	 * 
+	 * @param sourceRealm The source realm.
+	 * @param destHost    The destination host.
+	 * @param trans       The open transaction.
+	 * 
+	 * @throws TskCoreException 
+	 */
+	private void moveRealm(OsAccountRealm sourceRealm, Host destHost, CaseDbTransaction trans) throws TskCoreException {
+		try(Statement s = trans.getConnection().createStatement()) {
+			String query = "UPDATE tsk_os_account_realms SET scope_host_id = " + destHost.getHostId() + " WHERE id = " + sourceRealm.getRealmId();
+			s.executeUpdate(query);
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error moving realm with id: " + sourceRealm.getRealmId() + " to host with id: " + destHost.getHostId(), ex);
+		}
+	}
+	
+	
+	/**
+	 * Merge one realm into another, moving or combining all associated OsAccounts.
+	 * 
+	 * @param sourceRealm The source realm.
+	 * @param destRealm   The destination realm.
+	 * @param trans  The open transaction.
+	 * 
+	 * @throws TskCoreException 
+	 */
+	void mergeRealms(OsAccountRealm sourceRealm, OsAccountRealm destRealm, CaseDbTransaction trans) throws TskCoreException {
+
+		// Update accounts
+		db.getOsAccountManager().mergeOsAccountsForRealms(sourceRealm, destRealm, trans);
+
+		// Update the source realm
+		CaseDbConnection connection = trans.getConnection();
+		try (Statement statement = connection.createStatement()) {
+			String updateStr = "UPDATE tsk_os_account_realms SET db_status = " + OsAccountRealm.RealmDbStatus.MERGED.getId() 
+					+ ", merged_into = " + destRealm.getRealmId()
+					+ ", realm_signature = '" + makeMergedRealmSignature() + "' "
+					+ " WHERE id = " + sourceRealm.getRealmId();
+			connection.executeUpdate(statement, updateStr);
+		} catch (SQLException ex) {
+			throw new TskCoreException ("Error updating status of realm with id: " + sourceRealm.getRealmId(), ex);
+		}
+		
+		// Update the destination realm if it doesn't have the name or addr set and the source realm does
+		if (!destRealm.getRealmAddr().isPresent() && sourceRealm.getRealmAddr().isPresent()) {
+			updateRealm(destRealm, sourceRealm.getRealmAddr().get(), null, trans.getConnection());
+		} else if (destRealm.getRealmNames().isEmpty() && !sourceRealm.getRealmNames().isEmpty()) {
+			updateRealm(destRealm, null, sourceRealm.getRealmNames().get(0), trans.getConnection());
+		}
+	}
+	
+	/**
+	 * Get all realms associated with the given host.
+	 * 
+	 * @param host       The host.
+	 * @param connection The current database connection.
+	 * 
+	 * @return List of realms for the given host.
+	 * 
+	 * @throws TskCoreException 
+	 */
+	List<OsAccountRealm> getRealmsByHost(Host host, CaseDbConnection connection) throws TskCoreException {
+		List<OsAccountRealm> results = new ArrayList<>();
+		String queryString = REALM_QUERY_STRING
+			+ " WHERE realms.scope_host_id = " + host.getHostId();
+		
+		db.acquireSingleUserCaseReadLock();
+		try (	Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+			while (rs.next()) { 
+				results.add(resultSetToAccountRealm(rs));
+			} 
+			return results;
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting realms for host with id = %d", host.getHostId()), ex);
+		}
+		finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+	
+	/**
+	 * Status of a realm update.
+	 */
+	public enum OsRealmUpdateStatus {
+
+		NO_CHANGE,	/// no change was made to the realm.
+		UPDATED,	/// realm was updated
+		MERGED		/// realm update triggered a merge
+	}
+	
+	/**
+	 * Container to encapsulate the status returned by the realm update API, and
+	 * the updated realm.
+	 */
+	public final static class OsRealmUpdateResult {
+		
+		private final OsRealmUpdateStatus updateStatus;
+		private final OsAccountRealm updatedRealm;
+		
+		OsRealmUpdateResult(OsRealmUpdateStatus updateStatus, OsAccountRealm updatedRealm) {
+			this.updateStatus = updateStatus;
+			this.updatedRealm = updatedRealm;
+		}
+
+		public OsRealmUpdateStatus getUpdateStatus() {
+			return updateStatus;
+		}
+
+		public Optional<OsAccountRealm> getUpdatedRealm() {
+			return Optional.ofNullable(updatedRealm);
+		}
+	}
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Person.java b/bindings/java/src/org/sleuthkit/datamodel/Person.java
new file mode 100644
index 0000000000000000000000000000000000000000..0d3e94230f92d3c37dc80408742d1cf2cd00d4a1
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/Person.java
@@ -0,0 +1,95 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Objects;
+
+/**
+ * Encapsulates a person.
+ */
+public final class Person {
+
+	private final long id;
+	private String name;
+
+	Person(long id, String name) {
+		this.id = id;
+		this.name = name;
+	}
+
+	/**
+	 * Gets the row id for the person.
+	 *
+	 * @return Row id.
+	 */
+	public long getPersonId() {
+		return id;
+	}
+
+	/**
+	 * Gets the name for the person.
+	 *
+	 * @return Person name.
+	 */
+	public String getName() {
+		return name;
+	}
+
+	/**
+	 * Sets the name for the person. Does not update the database.
+	 *
+	 * @param newName The new name.
+	 */
+	public void setName(String newName) {
+		this.name = newName;
+	}
+
+	@Override
+	public int hashCode() {
+		int hash = 5;
+		hash = 67 * hash + (int) (this.id ^ (this.id >>> 32));
+		hash = 67 * hash + Objects.hashCode(this.name);
+		return hash;
+	}
+
+	@Override
+	public boolean equals(Object obj) {
+		if (this == obj) {
+			return true;
+		}
+		if (obj == null) {
+			return false;
+		}
+		if (getClass() != obj.getClass()) {
+			return false;
+		}
+
+		final Person other = (Person) obj;
+		if (this.id != other.id) {
+			return false;
+		}
+
+		if ((this.name == null) ? (other.name != null) : !this.name.equals(other.name)) {
+			return false;
+		}
+
+		return true;
+	}
+
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/PersonManager.java b/bindings/java/src/org/sleuthkit/datamodel/PersonManager.java
new file mode 100755
index 0000000000000000000000000000000000000000..8bae31b022b3334270f8fb40fcbbed895c1bda66
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/PersonManager.java
@@ -0,0 +1,464 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import com.google.common.base.Strings;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.TskEvent.PersonsAddedTskEvent;
+
+/**
+ * Responsible for creating/updating/retrieving Persons.
+ */
+public final class PersonManager {
+
+	private final SleuthkitCase db;
+
+	/**
+	 * Construct a PersonManager for the given SleuthkitCase.
+	 *
+	 * @param skCase The SleuthkitCase
+	 *
+	 */
+	PersonManager(SleuthkitCase skCase) {
+		this.db = skCase;
+	}
+
+	/**
+	 * Get all persons in the database.
+	 *
+	 * @return List of persons
+	 *
+	 * @throws TskCoreException
+	 */
+	public List<Person> getPersons() throws TskCoreException {
+		String queryString = "SELECT * FROM tsk_persons";
+
+		List<Person> persons = new ArrayList<>();
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			while (rs.next()) {
+				persons.add(new Person(rs.getLong("id"), rs.getString("name")));
+			}
+
+			return persons;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting persons", ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Update the database to match the given Person.
+	 *
+	 * @param person The person to update.
+	 *
+	 * @return person The person that was updated.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Person updatePerson(Person person) throws TskCoreException {
+
+		// Must have a non-empty name
+		if (Strings.isNullOrEmpty(person.getName())) {
+			throw new TskCoreException("Illegal argument passed to updatePerson: Name field for person with ID " + person.getPersonId() + " is null/empty. Will not update database.");
+		}
+
+		String queryString = "UPDATE tsk_persons"
+				+ " SET name = ? WHERE id = " + person.getPersonId();
+		db.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = db.getConnection()) {
+			PreparedStatement s = connection.getPreparedStatement(queryString, Statement.NO_GENERATED_KEYS);
+			s.clearParameters();
+			s.setString(1, person.getName());
+			s.executeUpdate();
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error updating person with id = %d", person.getPersonId()), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+
+		db.fireTSKEvent(new TskEvent.PersonsUpdatedTskEvent(Collections.singletonList(person)));
+		return person;
+	}
+
+	/**
+	 * Delete a person. Name comparison is case-insensitive.
+	 *
+	 * @param name Name of the person to delete
+	 *
+	 * @throws TskCoreException
+	 */
+	public void deletePerson(String name) throws TskCoreException {
+		String queryString = "DELETE FROM tsk_persons"
+				+ " WHERE LOWER(name) = LOWER(?)";
+
+		Person deletedPerson = null;
+		db.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = db.getConnection()) {
+			PreparedStatement s = connection.getPreparedStatement(queryString, Statement.RETURN_GENERATED_KEYS);
+			s.clearParameters();
+			s.setString(1, name);
+			s.executeUpdate();
+
+			try (ResultSet resultSet = s.getGeneratedKeys()) {
+				if (resultSet.next()) {
+					deletedPerson = new Person(resultSet.getLong(1), name);
+				}
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error deleting person with name %s", name), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+
+		if (deletedPerson != null) {
+			db.fireTSKEvent(new TskEvent.PersonsDeletedTskEvent(Collections.singletonList(deletedPerson.getPersonId())));
+		}
+	}
+
+	/**
+	 * Get person with given name. Name comparison is case-insensitive.
+	 *
+	 * @param name Person name to look for.
+	 *
+	 * @return Optional with person. Optional.empty if no matching person is
+	 *         found.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Optional<Person> getPerson(String name) throws TskCoreException {
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection()) {
+			return getPerson(name, connection);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get person with given id.
+	 *
+	 * @param id Id of the person to look for.
+	 *
+	 * @return Optional with person. Optional.empty if no matching person is
+	 *         found.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Optional<Person> getPerson(long id) throws TskCoreException {
+		String queryString = "SELECT * FROM tsk_persons WHERE id = " + id;
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (rs.next()) {
+				return Optional.of(new Person(rs.getLong("id"), rs.getString("name")));
+			} else {
+				return Optional.empty();
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting persons", ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Create a person with specified name. If a person already exists with the
+	 * given name, it returns the existing person. Name comparison is
+	 * case-insensitive.
+	 *
+	 * @param name	Person name.
+	 *
+	 * @return Person with the specified name.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Person newPerson(String name) throws TskCoreException {
+
+		// Must have a name
+		if (Strings.isNullOrEmpty(name)) {
+			throw new TskCoreException("Illegal argument passed to newPerson: Non-empty name is required.");
+		}
+
+		Person toReturn = null;
+		CaseDbConnection connection = null;
+		db.acquireSingleUserCaseWriteLock();
+		try {
+			connection = db.getConnection();
+		
+			// First try to load it from the database. This is a case-insensitive look-up
+			// to attempt to prevent having two entries with the same lower-case name.
+			Optional<Person> person = getPerson(name, connection);
+			if (person.isPresent()) {
+				return person.get();
+			}
+
+			// Attempt to insert the new Person.
+			String personInsertSQL = "INSERT INTO tsk_persons(name) VALUES (?)"; // NON-NLS
+			PreparedStatement preparedStatement = connection.getPreparedStatement(personInsertSQL, Statement.RETURN_GENERATED_KEYS);
+			preparedStatement.clearParameters();
+			preparedStatement.setString(1, name);
+			connection.executeUpdate(preparedStatement);
+
+			// Read back the row id.
+			try (ResultSet resultSet = preparedStatement.getGeneratedKeys();) {
+				if (resultSet.next()) {
+					toReturn = new Person(resultSet.getLong(1), name); //last_insert_rowid()
+				} else {
+					throw new SQLException("Error executing SQL: " + personInsertSQL);
+				}
+			}
+		} catch (SQLException ex) {
+			if (connection != null) {
+				// The insert may have failed because this person was just added on another thread, so try getting the person again.
+				// (Note: the SingleUserCaseWriteLock is a no-op for multi-user cases so acquiring it does not prevent this situation)
+				Optional<Person> person = getPerson(name, connection);
+				if (person.isPresent()) {
+					return person.get();
+				}
+			}
+			throw new TskCoreException(String.format("Error adding person with name = %s", name), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+
+		if (toReturn != null) {
+			db.fireTSKEvent(new PersonsAddedTskEvent(Collections.singletonList(toReturn)));
+		}
+		return toReturn;
+	}
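+
+	// Illustrative usage sketch; assumes the case exposes this manager via
+	// SleuthkitCase.getPersonManager() and uses a made-up person name:
+	//
+	//   PersonManager personManager = caseDb.getPersonManager();
+	//   Person person = personManager.newPerson("Jane Doe");            // returns the existing person if the name is already in use
+	//   Optional<Person> same = personManager.getPerson("jane doe");    // case-insensitive lookup finds the same person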
+
+	/**
+	 * Get all hosts associated with the given person.
+	 *
+	 * @param person The person.
+	 *
+	 * @return The list of hosts corresponding to the person.
+	 *
+	 * @throws TskCoreException Thrown if there is an issue querying the case
+	 *                          database.
+	 */
+	public List<Host> getHostsForPerson(Person person) throws TskCoreException {
+		return executeHostsQuery("SELECT * FROM tsk_hosts WHERE person_id = " + person.getPersonId());
+	}
+
+	/**
+	 * Gets all hosts not associated with any person.
+	 *
+	 * @return The hosts.
+	 *
+	 * @throws TskCoreException Thrown if there is an issue querying the case
+	 *                          database.
+	 */
+	public List<Host> getHostsWithoutPersons() throws TskCoreException {
+		return executeHostsQuery("SELECT * FROM tsk_hosts WHERE person_id IS NULL");
+	}
+
+	/**
+	 * Executes a query of the tsk_hosts table in the case database.
+	 *
+	 * @param hostsQuery The SQL query to execute.
+	 *
+	 * @return The list of hosts returned by the query (active hosts only).
+	 *
+	 * @throws TskCoreException Thrown if there is an issue querying the case
+	 *                          database.
+	 */
+	private List<Host> executeHostsQuery(String hostsQuery) throws TskCoreException {
+		String sql = hostsQuery + " AND db_status = " + Host.HostDbStatus.ACTIVE.getId();
+		List<Host> hosts = new ArrayList<>();
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, sql)) {
+			while (rs.next()) {
+				hosts.add(new Host(rs.getLong("id"), rs.getString("name"), Host.HostDbStatus.fromID(rs.getInt("db_status"))));
+			}
+			return hosts;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error executing '" + sql + "'", ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get person with given name. Name comparison is case-insensitive.
+	 *
+	 * @param name       Person name to look for.
+	 * @param connection Database connection to use.
+	 *
+	 * @return Optional with person. Optional.empty if no matching person is
+	 *         found.
+	 *
+	 * @throws TskCoreException
+	 */
+	private Optional<Person> getPerson(String name, CaseDbConnection connection) throws TskCoreException {
+
+		String queryString = "SELECT * FROM tsk_persons"
+				+ " WHERE LOWER(name) = LOWER(?)";
+		try {
+			PreparedStatement s = connection.getPreparedStatement(queryString, Statement.RETURN_GENERATED_KEYS);
+			s.clearParameters();
+			s.setString(1, name);
+
+			try (ResultSet rs = s.executeQuery()) {
+				if (!rs.next()) {
+					return Optional.empty();	// no match found
+				} else {
+					return Optional.of(new Person(rs.getLong("id"), rs.getString("name")));
+				}
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting person with name = %s", name), ex);
+		}
+	}
+
+	/**
+	 * Get person for the given host or empty if no associated person.
+	 *
+	 * @param host The host.
+	 *
+	 * @return The parent person or empty if no parent person.
+	 *
+	 * @throws TskCoreException if error occurs.
+	 */
+	public Optional<Person> getPerson(Host host) throws TskCoreException {
+
+		String queryString = "SELECT p.id AS personId, p.name AS name FROM \n"
+				+ "tsk_persons p INNER JOIN tsk_hosts h\n"
+				+ "ON p.id = h.person_id \n"
+				+ "WHERE h.id = " + host.getHostId();
+
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = this.db.getConnection();
+				Statement s = connection.createStatement();
+				ResultSet rs = connection.executeQuery(s, queryString)) {
+
+			if (rs.next()) {
+				return Optional.of(new Person(rs.getLong("personId"), rs.getString("name")));
+			} else {
+				return Optional.empty();
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error getting person for host with ID = %d", host.getHostId()), ex);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Adds one or more hosts to a person.
+	 *
+	 * @param person The person.
+	 * @param hosts  The hosts.
+	 *
+	 * @throws TskCoreException Thrown if the operation cannot be completed.
+	 */
+	public void addHostsToPerson(Person person, List<Host> hosts) throws TskCoreException {
+		if (person == null) {
+			throw new TskCoreException("Illegal argument: person must be non-null");
+		}
+		if (hosts == null || hosts.isEmpty()) {
+			throw new TskCoreException("Illegal argument: hosts must be non-null and non-empty");
+		}
+		executeHostsUpdate(person, getHostIds(hosts), new TskEvent.HostsAddedToPersonTskEvent(person, hosts));
+	}
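+
+	// Illustrative sketch of grouping hosts under a person (person and hosts are hypothetical):
+	//
+	//   personManager.addHostsToPerson(person, Arrays.asList(laptopHost, phoneHost));
+	//   List<Host> grouped = personManager.getHostsForPerson(person);   // now includes both hosts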
+
+	/**
+	 * Removes one or more hosts from a person.
+	 *
+	 * @param person The person.
+	 * @param hosts  The hosts.
+	 *
+	 * @throws TskCoreException Thrown if the operation cannot be completed.
+	 */
+	public void removeHostsFromPerson(Person person, List<Host> hosts) throws TskCoreException {
+		if (person == null) {
+			throw new TskCoreException("Illegal argument: person must be non-null");
+		}
+		if (hosts == null || hosts.isEmpty()) {
+			throw new TskCoreException("Illegal argument: hosts must be non-null and non-empty");
+		}
+		List<Long> hostIds = getHostIds(hosts);
+		executeHostsUpdate(null, hostIds, new TskEvent.HostsRemovedFromPersonTskEvent(person, hostIds));
+	}
+
+	/**
+	 * Executes an update of the person_id column for one or more hosts in the
+	 * tsk_hosts table in the case database.
+	 *
+	 * @param person  The person to get the person ID from or null if the person
+	 *                ID of the hosts should be set to NULL.
+	 * @param hostIds The host IDs of the hosts.
+	 * @param event   A TSK event to be published if the update succeeds.
+	 *
+	 * @throws TskCoreException Thrown if the update fails.
+	 */
+	private void executeHostsUpdate(Person person, List<Long> hostIds, TskEvent event) throws TskCoreException {
+		String updateSql = null;
+		db.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = this.db.getConnection(); Statement statement = connection.createStatement()) {
+			updateSql = (person == null)
+					? "UPDATE tsk_hosts SET person_id = NULL"
+					: String.format("UPDATE tsk_hosts SET person_id = %d", person.getPersonId());
+			String hostIdsCsvList = hostIds.stream()
+					.map(hostId -> hostId.toString())
+					.collect(Collectors.joining(","));
+			updateSql += " WHERE id IN (" + hostIdsCsvList + ")";
+			statement.executeUpdate(updateSql);
+			db.fireTSKEvent(event);
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format(updateSql == null ? "Error connecting to case database" : "Error executing '" + updateSql + "'"), ex);
+		} finally {
+			db.releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Gets a list of host IDs from a list of hosts.
+	 *
+	 * @param hosts The hosts.
+	 *
+	 * @return The host IDs.
+	 */
+	private List<Long> getHostIds(List<Host> hosts) {
+		List<Long> hostIds = new ArrayList<>();
+		hostIds.addAll(hosts.stream()
+				.map(host -> host.getHostId())
+				.collect(Collectors.toList()));
+		return hostIds;
+	}
+
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Report.java b/bindings/java/src/org/sleuthkit/datamodel/Report.java
index bf70b73924b2149343cf355517fda8ec49e34f9d..51d6c930e75826befbcb324e33bd120d0f145d7a 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Report.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Report.java
@@ -26,17 +26,21 @@
 import java.nio.file.Paths;
 import static java.nio.file.StandardOpenOption.READ;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
+import org.sleuthkit.datamodel.Blackboard.BlackboardException;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
 
 /**
  * This is a class that models reports.
  */
 public class Report implements Content {
 
+	private static final BlackboardArtifact.Type KEYWORD_HIT_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT);
 	static long ID_NOT_SET = -1;
 	private long objectId = ID_NOT_SET;
 	private final String pathAsString;
@@ -59,7 +63,7 @@ public class Report implements Content {
 	 * @param path        Absolute path to report.
 	 * @param createdTime Created time of report (in UNIX epoch time).
 	 * @param reportName  May be empty
-	 * @param parent	  The parent/source of the Report.
+	 * @param parent	     The parent/source of the Report.
 	 */
 	Report(SleuthkitCase db, long id, String path, long createdTime, String sourceModuleName, String reportName, Content parent) {
 		this.db = db;
@@ -67,11 +71,10 @@ public class Report implements Content {
 		this.pathAsString = path;
 		if (path.startsWith("http")) {
 			this.pathAsPath = null;
-		}
-		else {
+		} else {
 			this.pathAsPath = Paths.get(path);
 		}
-		
+
 		this.createdTime = createdTime;
 		this.sourceModuleName = sourceModuleName;
 		this.reportName = reportName;
@@ -89,7 +92,7 @@ public long getId() {
 	 * @return
 	 */
 	public String getPath() {
-		return (pathAsPath != null ? pathAsPath.toString() : pathAsString);		
+		return (pathAsPath != null ? pathAsPath.toString() : pathAsString);
 	}
 
 	/**
@@ -150,8 +153,9 @@ public int read(byte[] buf, long offset, long len) throws TskCoreException {
 	@Override
 	public void close() {
 		try {
-			if (fileChannel != null) 
+			if (fileChannel != null) {
 				fileChannel.close();
+			}
 		} catch (IOException ex) {
 			LOGGER.log(Level.WARNING, "Failed to close report file.", ex);
 		}
@@ -160,7 +164,7 @@ public void close() {
 	@Override
 	public long getSize() {
 		try {
-			return (pathAsPath != null ?  Files.size(pathAsPath) : 0);
+			return (pathAsPath != null ? Files.size(pathAsPath) : 0);
 		} catch (IOException ex) {
 			LOGGER.log(Level.SEVERE, "Failed to get size of report.", ex);
 			// If we cannot determine the size of the report, return zero
@@ -228,14 +232,81 @@ public List<Long> getChildrenIds() throws TskCoreException {
 		return Collections.<Long>emptyList();
 	}
 
+	@Deprecated
 	@Override
 	public BlackboardArtifact newArtifact(int artifactTypeID) throws TskCoreException {
 		if (artifactTypeID != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
 			throw new TskCoreException("Reports can only have keyword hit artifacts.");
 		}
-		return db.newBlackboardArtifact(artifactTypeID, objectId);
+
+		long fileObjId = getId();
+		Long dsObjId = getDataSource() == null ? null : getDataSource().getId(); // boxed Long so a report without a data source does not NPE on this assignment
+
+		try {
+			return db.getBlackboard().newAnalysisResult(
+					KEYWORD_HIT_TYPE, fileObjId, dsObjId, Score.SCORE_UNKNOWN,
+					null, null, null, Collections.emptyList())
+					.getAnalysisResult();
+		} catch (BlackboardException ex) {
+			throw new TskCoreException("Unable to get analysis result for keyword hit.", ex);
+		}
+	}
+
+	@Override
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
+		CaseDbTransaction trans = db.beginTransaction();
+		try {
+			AnalysisResultAdded resultAdded = db.getBlackboard().newAnalysisResult(artifactType, objectId, this.getDataSource().getId(), score, conclusion, configuration, justification, attributesList, trans);
+
+			trans.commit();
+			return resultAdded;
+		} catch (BlackboardException ex) {
+			trans.rollback();
+			throw new TskCoreException("Error adding analysis result.", ex);
+		}
+	}
+
+	@Override
+	public AnalysisResultAdded newAnalysisResult(BlackboardArtifact.Type artifactType, Score score, String conclusion, String configuration, String justification, Collection<BlackboardAttribute> attributesList, long dataSourceId) throws TskCoreException {
+		CaseDbTransaction trans = db.beginTransaction();
+		try {
+			AnalysisResultAdded resultAdded = db.getBlackboard().newAnalysisResult(artifactType, objectId, dataSourceId, score, conclusion, configuration, justification, attributesList, trans);
+
+			trans.commit();
+			return resultAdded;
+		} catch (BlackboardException ex) {
+			trans.rollback();
+			throw new TskCoreException("Error adding analysis result.", ex);
+		}
+	}
+
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId) throws TskCoreException {
+
+		if (artifactType.getTypeID() != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
+			throw new TskCoreException("Reports can only have keyword hit artifacts.");
+		}
+		
+		return db.getBlackboard().newDataArtifact(artifactType, objectId, this.getDataSource().getId(), attributesList, osAccountId);
+	}
+
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList, Long osAccountId, long dataSourceId) throws TskCoreException {
+
+		if (artifactType.getTypeID() != BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID()) {
+			throw new TskCoreException("Reports can only have keyword hit artifacts.");
+		}
+		
+		return db.getBlackboard().newDataArtifact(artifactType, objectId, dataSourceId, attributesList, osAccountId);
 	}
 
+	@Override
+	public DataArtifact newDataArtifact(BlackboardArtifact.Type artifactType, Collection<BlackboardAttribute> attributesList) throws TskCoreException {
+		return newDataArtifact(artifactType, attributesList, null);
+	}
+	
+	@Deprecated
+	@SuppressWarnings("deprecation")
 	@Override
 	public BlackboardArtifact newArtifact(BlackboardArtifact.ARTIFACT_TYPE type) throws TskCoreException {
 		return newArtifact(type.getTypeID());
@@ -282,6 +353,26 @@ public ArrayList<BlackboardArtifact> getAllArtifacts() throws TskCoreException {
 		return db.getMatchingArtifacts("WHERE obj_id = " + objectId); //NON-NLS
 	}
 
+	@Override
+	public List<AnalysisResult> getAllAnalysisResults() throws TskCoreException {
+		return db.getBlackboard().getAnalysisResults(objectId);
+	}
+	
+	@Override
+	public List<DataArtifact> getAllDataArtifacts() throws TskCoreException {
+		return db.getBlackboard().getDataArtifactsBySource(objectId);
+	}
+
+	@Override
+	public List<AnalysisResult> getAnalysisResults(BlackboardArtifact.Type artifactType) throws TskCoreException {
+		return db.getBlackboard().getAnalysisResults(objectId, artifactType.getTypeID());
+	}
+
+	@Override
+	public Score getAggregateScore() throws TskCoreException {
+		return db.getScoringManager().getAggregateScore(objectId);
+	}
+
 	@Override
 	public Set<String> getHashSetNames() throws TskCoreException {
 		return Collections.<String>emptySet();
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Score.java b/bindings/java/src/org/sleuthkit/datamodel/Score.java
new file mode 100644
index 0000000000000000000000000000000000000000..37e2802a3e318aa1bd767cd99961f16ff7c1ee17
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/Score.java
@@ -0,0 +1,249 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.ResourceBundle;
+
+/**
+ * Encapsulates either an analysis result score or the aggregate score of
+ * Content. A score measures how likely the Content object is to be relevant to
+ * an investigation. Relevance is determined by a series of analysis techniques,
+ * each of which has a score. The aggregate score for an item is then determined
+ * based on its analysis results.
+ *
+ * A score has two primary fields: Significance and Priority.
+ *
+ * There are two priorities: Normal and Override. Nearly everything should have
+ * a "Normal" priority. "Override" is used when a user wants to change the score
+ * because of a false positive.  An "Override" score will take priority over 
+ * the combined "Normal" scores.  An item should have only one "Override" score
+ * at a time, but that is not currently enforced. 
+ *
+ * The significance is a range of how Notable (i.e. "Bad") the item is. The
+ * range is from NONE (i.e. "Good") to NOTABLE with values in the middle, such
+ * as LIKELY_NOTABLE for suspicious items. The LIKELY_ values are used when
+ * there is less confidence in the result. The significance reflects the
+ * expected false positive rate of the technique at detecting notable or benign
+ * items.
+ *
+ *
+ * For an example, if a file is found in a MD5 hashset of notable files, then a
+ * module would use a significance of NOTABLE. This is because the MD5 is exact
+ * match and the hash set is all notable files.
+ *
+ * For a keyword hit, the significance would be LIKELY_NOTABLE because keywords
+ * often can be used in both good and bad ways. A user will need to review the
+ * file to determine if it is a true or false positive.
+ *
+ * If a file is found to be on a good list (via MD5), then it could have a
+ * significance of NONE and then other modules could ignore it.
+ *
+ * An aggregate score is the combination of the specific analysis results.
+ * Results with an Override priority overrule those with a Normal priority.
+ * NOTABLE overrules NONE. Both of those overrule the LIKELY_* results.
+ * 
+ * NOTABLE > NONE > LIKELY_NOTABLE > LIKELY_NONE > UNKNOWN
+ */
+public class Score implements Comparable<Score> {
+
+	private static final ResourceBundle bundle = ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle");
+	/**
+	 * Indicates the relevance of an item based on the analysis result's
+	 * conclusion.
+	 *
+	 * For comparing significance, the following ordering applies:
+	 *
+	 * Bad > Good > Likely Bad > Likely Good > Unknown
+	 */
+	public enum Significance {
+
+		// Enum name must not have any spaces.
+
+		/* Notes on the ordinal numbers: We defined these so that we could easily
+		 * compare values while also having some concept of grouping.
+		 * The 1x values are a higher confidence than the 0x values.
+		 * NOTABLE (x9) has priority over NOT NOTABLE (x8).
+		 * If we need to make this more complicated in the future, we can add
+		 * other groupings, such as 14 and 15.
+		 */
+		
+		/// no significance assigned yet.
+		UNKNOWN(0, "Unknown", "Significance.Unknown.displayName.text"),	
+		
+		/// likely good		
+		LIKELY_NONE(8, "LikelyNone", "Significance.LikelyNone.displayName.text"),
+		
+		/// likely bad, suspicious
+		LIKELY_NOTABLE(9, "LikelyNotable", "Significance.LikelyNotable.displayName.text"),	
+		
+		/// good
+		NONE(18, "None", "Significance.None.displayName.text"),		
+		
+		/// bad
+		NOTABLE(19, "Notable", "Significance.Notable.displayName.text");				
+		
+		private final int id;
+		private final String name;	// name must not have spaces
+		private final String displayNameKey; // display name is loaded from resource bundle using this key.
+
+		private Significance(int id, String name, String displayNameKey) {
+			this.id = id;
+			this.name = name;
+			this.displayNameKey = displayNameKey;
+		}
+
+		public static Significance fromString(String name) {
+			return Arrays.stream(values())
+					.filter(val -> val.getName().equals(name))
+					.findFirst().orElse(NONE);
+		}
+
+		static public Significance fromID(int id) {
+			return Arrays.stream(values())
+					.filter(val -> val.getId() == id)
+					.findFirst().orElse(NONE);
+		}
+
+		/**
+		 * Get enum ordinal.
+		 * 
+		 * @return Ordinal.
+		 */
+		public int getId() {
+			return id;
+		}
+
+		/**
+		 * Gets the name, which has no spaces in it and is not translated.
+		 *
+		 * @return Name.
+		 */
+		public String getName() {
+			return name;
+		}
+
+		/**
+		 * Gets the display name, which may have spaces and can be used in the
+		 * UI. May return a translated version.
+		 *
+		 * @return Display name.
+		 */
+		public String getDisplayName() {
+			return bundle.getString(displayNameKey);
+		}
+			
+		@Override
+		public String toString() {
+			return name;
+		}
+	}
+
+	/**
+	 * Represents the priority of the score, which allows a user or module to override it.
+	 */
+	public enum Priority {
+
+		// Name must not have any spaces.
+		NORMAL(0, "Normal",  "Score.Priority.Normal.displayName.text"),
+		OVERRIDE(10, "Override", "Score.Priority.Override.displayName.text"); 
+
+		private final int id;
+		private final String name; 
+		private final String displayNameKey; // display name is loaded from resource bundle using this key.
+		
+		private Priority(int id, String name, String displayNameKey) {
+			this.id = id;
+			this.name = name;
+			this.displayNameKey = displayNameKey;
+		}
+
+		public static Priority fromString(String name) {
+			return Arrays.stream(values())
+					.filter(val -> val.getName().equals(name))
+					.findFirst().orElse(NORMAL);
+		}
+
+		static public Priority fromID(int id) {
+			return Arrays.stream(values())
+					.filter(val -> val.getId() == id)
+					.findFirst().orElse(NORMAL);
+		}
+
+		public int getId() {
+			return id;
+		}
+
+		public String getName() {
+			return name;
+		}
+
+		public String getDisplayName() {
+			return bundle.getString(displayNameKey);
+		}
+		
+		@Override
+		public String toString() {
+			return name;
+		}
+	}
+
+	public static final Score SCORE_NOTABLE = new Score(Significance.NOTABLE, Priority.NORMAL);
+	public static final Score SCORE_LIKELY_NOTABLE = new Score(Significance.LIKELY_NOTABLE, Priority.NORMAL);	
+	public static final Score SCORE_LIKELY_NONE = new Score(Significance.LIKELY_NONE, Priority.NORMAL);
+	public static final Score SCORE_NONE = new Score(Significance.NONE, Priority.NORMAL);
+	
+	public static final Score SCORE_UNKNOWN = new Score(Significance.UNKNOWN, Priority.NORMAL);
+	
+	// Score is a combination of significance and priority.
+	private final Significance significance;
+	private final Priority priority;
+
+	public Score(Significance significance, Priority priority) {
+		this.significance = significance;
+		this.priority = priority;
+	}
+
+	public Significance getSignificance() {
+		return significance;
+	}
+
+	public Priority getPriority() {
+		return priority;
+	}
+
+	@Override
+	public int compareTo(Score other) {
+		// A score is a combination of significance & priority.
+		// Priority Override overrides Normal.
+		// If two results have same priority, then the higher significance wins.
+		if (this.getPriority() != other.getPriority()) {
+			return this.getPriority().ordinal() - other.getPriority().ordinal();
+		} else {
+			return this.getSignificance().ordinal() - other.getSignificance().ordinal();
+		}
+	}
+	
+	public static Comparator<Score> getScoreComparator() {
+		return (Score score1, Score score2) -> score1.compareTo(score2);
+	}
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/ScoreChange.java b/bindings/java/src/org/sleuthkit/datamodel/ScoreChange.java
new file mode 100644
index 0000000000000000000000000000000000000000..cf051b71dd02b7fef7840e4864eb37c8addee96c
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/ScoreChange.java
@@ -0,0 +1,55 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Optional;
+
+/**
+ * This class encapsulates a score change.
+ */
+final public class ScoreChange {
+
+	private final long objId;
+	private final Long dataSourceObjectId;
+	private final Score oldScore;
+	private final Score newScore;
+
+	ScoreChange(long objId, Long dataSourceObjectId, Score oldScore, Score newScore) {
+		this.objId = objId;
+		this.dataSourceObjectId = dataSourceObjectId;
+		this.oldScore = oldScore;
+		this.newScore = newScore;
+	}
+
+	public Long getDataSourceObjectId() {
+		return dataSourceObjectId;
+	}
+
+	public long getObjectId() {
+		return objId;
+	}
+
+	public Score getOldScore() {
+		return oldScore;
+	}
+
+	public Score getNewScore() {
+		return newScore;
+	}
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/ScoringManager.java b/bindings/java/src/org/sleuthkit/datamodel/ScoringManager.java
new file mode 100644
index 0000000000000000000000000000000000000000..0c8b70a7c5e4c29bb0081a7be394530dd4c3dfc4
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/ScoringManager.java
@@ -0,0 +1,406 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.Map;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+import org.sleuthkit.datamodel.Score.Priority;
+import org.sleuthkit.datamodel.Score.Significance;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
+
+/**
+ * The scoring manager is responsible for updating and querying the score of
+ * objects.
+ *
+ */
+public class ScoringManager {
+
+	private static final Logger LOGGER = Logger.getLogger(ScoringManager.class.getName());
+
+	private final SleuthkitCase db;
+
+	/**
+	 * Construct a ScoringManager for the given SleuthkitCase.
+	 *
+	 * @param skCase The SleuthkitCase
+	 *
+	 */
+	ScoringManager(SleuthkitCase skCase) {
+		this.db = skCase;
+	}
+
+	/**
+	 * Get the aggregate score for the given object.
+	 *
+	 * @param objId Object id.
+	 *
+	 * @return The score, if one is found; SCORE_UNKNOWN otherwise.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Score getAggregateScore(long objId) throws TskCoreException {
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection()) {
+			return getAggregateScore(objId, connection);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get the aggregate scores for the given list of object ids.
+	 *
+	 * @param objIds Object id list.
+	 *
+	 * @return Map of object id to Score. Every input object id is present as a
+	 *         key; if no score is found for an object, it maps to SCORE_UNKNOWN.
+	 *
+	 * @throws TskCoreException
+	 */
+	public Map<Long, Score> getAggregateScores(List<Long> objIds) throws TskCoreException {
+
+		if (objIds.isEmpty()) {
+			return Collections.emptyMap();
+		}
+
+		String queryString = "SELECT obj_id, significance, priority FROM tsk_aggregate_score WHERE obj_id in "
+				+ objIds.stream().map(l -> l.toString()).collect(Collectors.joining(",", "(", ")"));
+
+		Map<Long, Score> results = objIds.stream().collect(Collectors.toMap( key -> key, key -> Score.SCORE_UNKNOWN));
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection()) {
+			try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) {
+				while (rs.next()) {
+					Long objId = rs.getLong("obj_id");
+					Score score = new Score(Significance.fromID(rs.getInt("significance")), Priority.fromID(rs.getInt("priority")));
+					results.put(objId, score);
+				}
+			} catch (SQLException ex) {
+				throw new TskCoreException("SQLException thrown while running query: " + queryString, ex);
+			}
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+		return results;
+	}
+
+
+	/**
+	 * Get the aggregate score for the given object. Uses the connection from the
+	 * given transaction.
+	 *
+	 * @param objId      Object id.
+	 * @param transaction Transaction that provides the connection to use.
+	 *
+	 * @return The score, if one is found; SCORE_UNKNOWN otherwise.
+	 *
+	 * @throws TskCoreException
+	 */
+	private Score getAggregateScore(long objId, CaseDbTransaction transaction) throws TskCoreException {
+		CaseDbConnection connection = transaction.getConnection();
+		return getAggregateScore(objId, connection);
+	}
+
+	/**
+	 * Get the aggregate score for the given object.
+	 *
+	 * @param objId Object id.
+	 * @param connection Connection to use for the query.
+	 *
+	 * @return Score, if it is found, SCORE_UNKNOWN otherwise.
+	 *
+	 * @throws TskCoreException
+	 */
+	private Score getAggregateScore(long objId, CaseDbConnection connection) throws TskCoreException {
+		String queryString = "SELECT significance, priority FROM tsk_aggregate_score WHERE obj_id = " + objId;
+		try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, queryString)) {
+			if (rs.next()) {
+				return new Score(Significance.fromID(rs.getInt("significance")), Priority.fromID(rs.getInt("priority")));
+			} else {
+				return Score.SCORE_UNKNOWN;
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException("SQLException thrown while running query: " + queryString, ex);
+		} 
+	}
+
+ 
+	/**
+	 * Inserts or updates the score for the given object.
+	 *
+	 * @param objId              Object id of the object.
+	 * @param dataSourceObjectId Data source object id, may be null.
+	 * @param score              Score to be inserted/updated.
+	 * @param transaction        Transaction to use for the update.
+	 *
+	 * @throws TskCoreException
+	 */
+	private void setAggregateScore(long objId, Long dataSourceObjectId, Score score, CaseDbTransaction transaction) throws TskCoreException {
+
+		String insertSQLString = "INSERT INTO tsk_aggregate_score (obj_id, data_source_obj_id, significance , priority) VALUES (?, ?, ?, ?)"
+				+ " ON CONFLICT (obj_id) DO UPDATE SET significance = ?, priority = ?";
+
+		CaseDbConnection connection = transaction.getConnection();
+		try {
+			PreparedStatement preparedStatement = connection.getPreparedStatement(insertSQLString, Statement.NO_GENERATED_KEYS);
+			preparedStatement.clearParameters();
+
+			preparedStatement.setLong(1, objId);
+			if (dataSourceObjectId != null) {
+				preparedStatement.setLong(2, dataSourceObjectId);
+			} else {
+				preparedStatement.setNull(2, java.sql.Types.NULL);
+			}
+			preparedStatement.setInt(3, score.getSignificance().getId());
+			preparedStatement.setInt(4, score.getPriority().getId());
+
+			preparedStatement.setInt(5, score.getSignificance().getId());
+			preparedStatement.setInt(6, score.getPriority().getId());
+
+			connection.executeUpdate(preparedStatement);
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error updating aggregate score, query: %s for objId = %d", insertSQLString, objId), ex);//NON-NLS
+		}  
+
+	}
+
+
+
+	/**
+	 * Updates the aggregate score for the specified object after an analysis
+	 * result has been added. It is optimized to do nothing if the new score is
+	 * lower than the current aggregate score.
+	 *
+	 * @param objId              Object id.
+	 * @param dataSourceObjectId Object id of the data source, may be null.
+	 * @param newResultScore     Score for a newly added analysis result.
+	 * @param transaction        Transaction to use for the update.
+	 *
+	 * @return Aggregate score for the object.
+	 *
+	 * @throws TskCoreException
+	 */
+	Score updateAggregateScoreAfterAddition(long objId, Long dataSourceObjectId, Score newResultScore, CaseDbTransaction transaction) throws TskCoreException {
+
+		/* Get an exclusive write lock on the aggregate score table before we read anything
+		 * so that we know we are the only one reading existing scores and updating them.
+		 * The risk otherwise is that two clients could update the score concurrently and the
+		 * aggregate score would end up incorrect.
+		 *
+		 * NOTE: The alternative design is to add a 'version' column for optimistic locking
+		 * and calculate these outside of a transaction. We opted for table locking for performance
+		 * reasons so that we can still add the analysis results in a batch. That remains an option
+		 * if we get into deadlocks with the current design.
+		 */
+		try {
+			CaseDbConnection connection = transaction.getConnection();
+			connection.getAggregateScoreTableWriteLock();
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting exclusive write lock on aggregate score table", ex);//NON-NLS
+		}
+			
+		
+		// Get the current score 
+		Score currentAggregateScore = ScoringManager.this.getAggregateScore(objId, transaction);
+
+		// Update if the current score is Unknown and the new score is not (this allows None/good to be recorded),
+		// or if the new score is higher than the current score.
+		if  ( (currentAggregateScore.compareTo(Score.SCORE_UNKNOWN) == 0 && newResultScore.compareTo(Score.SCORE_UNKNOWN) != 0)
+			  || (Score.getScoreComparator().compare(newResultScore, currentAggregateScore) > 0)) {
+			setAggregateScore(objId, dataSourceObjectId, newResultScore, transaction);
+			
+			// register score change in the transaction.
+			transaction.registerScoreChange(new ScoreChange(objId, dataSourceObjectId, currentAggregateScore, newResultScore));
+			return newResultScore;
+		} else {
+			// return the current score
+			return currentAggregateScore;
+		}
+	}
+	
+	/**
+	 * Recalculates the aggregate score after an analysis result has been
+	 * deleted.
+	 *
+	 * @param objId              Object id of the content the result was deleted from.
+	 * @param dataSourceObjectId Object id of the data source the content is in, may be null.
+	 * @param transaction        Transaction to use for the update.
+	 *
+	 * @return The new aggregate score.
+	 *
+	 * @throws TskCoreException
+	 */
+	Score updateAggregateScoreAfterDeletion(long objId, Long dataSourceObjectId, CaseDbTransaction transaction) throws TskCoreException {
+
+		CaseDbConnection connection = transaction.getConnection();
+		
+		/* Get an exclusive write lock on the aggregate score table before we read anything
+		 * so that we know we are the only one reading existing scores and updating them.
+		 * The risk otherwise is that two clients could update the score concurrently and the
+		 * aggregate score would end up incorrect.
+		 *
+		 * NOTE: The alternative design is to add a 'version' column for optimistic locking
+		 * and calculate these outside of a transaction. We opted for table locking for performance
+		 * reasons so that we can still add the analysis results in a batch. That remains an option
+		 * if we get into deadlocks with the current design.
+		 */
+		try {
+			connection.getAggregateScoreTableWriteLock();
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting exclusive write lock on aggregate score table", ex);//NON-NLS
+		}
+			
+		// Get the current score 
+		Score currentScore = ScoringManager.this.getAggregateScore(objId, transaction);
+
+		// Calculate the score from scratch by getting all of them and getting the highest
+		List<AnalysisResult> analysisResults = db.getBlackboard().getAnalysisResults(objId, connection);
+		Score newScore = Score.SCORE_UNKNOWN;
+		for (AnalysisResult iter : analysisResults) {
+			Score iterScore = iter.getScore();
+			if (Score.getScoreComparator().compare(iterScore, newScore) > 0) {
+				newScore = iterScore;
+			}
+		}
+
+		// Take the higher of the score calculated from the analysis results and the score
+		// derived from the maximum known status of any content tag on this content.
+		Optional<Score> tagScore = db.getTaggingManager().getMaxTagKnownStatus(objId, transaction)
+				.map(knownStatus -> TaggingManager.getTagScore(knownStatus));
+		
+		if (tagScore.isPresent() && Score.getScoreComparator().compare(tagScore.get(), newScore) > 0) {
+			newScore = tagScore.get();
+		}
+		
+		// only change the DB if we got a new score. 
+		if (newScore.compareTo(currentScore) != 0) {
+			setAggregateScore(objId, dataSourceObjectId, newScore, transaction);
+
+			// register the score change with the transaction so an event can be fired for it. 
+			transaction.registerScoreChange(new ScoreChange(objId, dataSourceObjectId, currentScore, newScore));
+		}
+		return newScore;
+	}
+	
+	/**
+	 * Get the count of contents within the specified data source
+	 * with the specified significance.
+	 *
+	 * @param dataSourceObjectId Data source object id.
+	 * @param significance Significance to look for.
+	 *
+	 * @return Number of contents with given score.
+	 * @throws TskCoreException if there is an error getting the count. 
+	 */
+	public long getContentCount(long dataSourceObjectId, Score.Significance significance) throws TskCoreException {
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection()) {
+			return getContentCount(dataSourceObjectId, significance, connection);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+
+	/**
+	 * Get the count of contents with the specified significance. Uses the
+	 * specified database connection.
+	 *
+	 * @param dataSourceObjectId Data source object id.
+	 * @param significance       Significance to look for.
+	 * @param connection         Database connection to use.
+	 *
+	 * @return Number of contents with given score.
+	 *
+	 * @throws TskCoreException if there is an error getting the count.
+	 */
+	private long getContentCount(long dataSourceObjectId, Score.Significance significance, CaseDbConnection connection) throws TskCoreException {
+		String queryString = "SELECT COUNT(obj_id) AS count FROM tsk_aggregate_score"
+				+ " WHERE data_source_obj_id = " + dataSourceObjectId
+				+ " AND significance = " + significance.getId();
+
+		try (Statement statement = connection.createStatement();
+				ResultSet resultSet = connection.executeQuery(statement, queryString);) {
+
+			long count = 0;
+			if (resultSet.next()) {
+				count = resultSet.getLong("count");
+			}
+			return count;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting count of items with significance = " + significance.toString(), ex);
+		}
+	}
+	
+	/**
+	 * Get the contents with the specified score.
+	 *
+	 * @param dataSourceObjectId Data source object id.
+	 * @param significance       Significance to look for.
+	 *
+	 * @return Collection of contents with given score.
+	 * 
+	 * @throws TskCoreException if there is an error getting the contents.
+	 */
+	public List<Content> getContent(long dataSourceObjectId, Score.Significance significance) throws TskCoreException {
+		db.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = db.getConnection()) {
+			return getContent(dataSourceObjectId, significance, connection);
+		} finally {
+			db.releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Gets the contents with the specified score. Uses the specified
+	 * database connection.
+	 *
+	 * @param dataSourceObjectId Data source object id.
+	 * @param significance       Significance to look for.
+	 * @param connection         Connection to use for the query.
+	 *
+	 * @return List of contents with given score.
+	 *
+	 * @throws TskCoreException
+	 */
+	private List<Content> getContent(long dataSourceObjectId, Score.Significance significance, CaseDbConnection connection) throws TskCoreException {
+		String queryString = "SELECT obj_id FROM tsk_aggregate_score"
+				+ " WHERE data_source_obj_id = " + dataSourceObjectId 
+				+ " AND significance = " + significance.getId();
+			
+		try (Statement statement = connection.createStatement();
+				ResultSet resultSet = connection.executeQuery(statement, queryString);) {
+
+			List<Content> items = new ArrayList<>();
+			while (resultSet.next()) {
+				long objId = resultSet.getLong("obj_id");
+				items.add(db.getContentById(objId));
+			}
+			return items;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting list of items with significance = " + significance.toString(), ex);
+		} 
+	}
+}
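A minimal usage sketch for the ScoringManager added above, assuming an already-created case database; the case path and object ids are hypothetical and exception handling is limited to a bare throws clause:

    import java.util.Arrays;
    import java.util.Map;
    import org.sleuthkit.datamodel.Score;
    import org.sleuthkit.datamodel.ScoringManager;
    import org.sleuthkit.datamodel.SleuthkitCase;
    import org.sleuthkit.datamodel.TskCoreException;

    public class ScoringManagerExample {

        public static void main(String[] args) throws TskCoreException {
            // Hypothetical single-user case path and object ids, for illustration only.
            SleuthkitCase caseDb = SleuthkitCase.openCase("/cases/example/case.db");
            ScoringManager scoringManager = caseDb.getScoringManager();

            // Single object: SCORE_UNKNOWN is returned if no aggregate score has been recorded.
            Score score = scoringManager.getAggregateScore(1234L);
            System.out.println("Aggregate: " + score.getSignificance() + "/" + score.getPriority());

            // Batch lookup: every requested object id is present in the returned map.
            Map<Long, Score> scores = scoringManager.getAggregateScores(Arrays.asList(1234L, 5678L));

            // Count of items in data source 1 that were scored NOTABLE.
            long notableCount = scoringManager.getContentCount(1L, Score.Significance.NOTABLE);
            System.out.println(scores.size() + " scores fetched, " + notableCount + " notable items");
        }
    }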
diff --git a/bindings/java/src/org/sleuthkit/datamodel/SlackFile.java b/bindings/java/src/org/sleuthkit/datamodel/SlackFile.java
index df038e6021013a8f0a5b706e27721a721b2856d5..8cf9407e5c3209787c76fbf27684b59824041c33 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/SlackFile.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/SlackFile.java
@@ -18,6 +18,7 @@
  */
 package org.sleuthkit.datamodel;
 
+import java.util.Collections;
 import org.sleuthkit.datamodel.TskData.FileKnown;
 import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
@@ -76,6 +77,10 @@ public class SlackFile extends FsContent {
 	 *                           yet been determined.
 	 * @param extension	         The extension part of the file name (not
 	 *                           including the '.'), can be null.
+	 * @param ownerUid			 UID of the file owner as found in the file
+	 *                           system, can be null.
+	 * @param osAccountObjId	 Obj id of the owner OS account, may be null.
+	 * 
 	 */
 	SlackFile(SleuthkitCase db,
 			long objId,
@@ -90,8 +95,10 @@ public class SlackFile extends FsContent {
 			long ctime, long crtime, long atime, long mtime,
 			short modes, int uid, int gid,
 			String md5Hash, String sha256Hash, FileKnown knownState, String parentPath, String mimeType,
-			String extension) {
-		super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.SLACK, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension);
+			String extension,
+			String ownerUid,
+			Long osAccountObjId) {
+		super(db, objId, dataSourceObjectId, fsObjId, attrType, attrId, name, TskData.TSK_DB_FILES_TYPE_ENUM.SLACK, metaAddr, metaSeq, dirType, metaType, dirFlag, metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, extension, ownerUid, osAccountObjId, Collections.emptyList());
 	}
 
 	/**
diff --git a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
index 40436741704157b731d3ee429878a5cb251f6c23..32326ed21baf4cf23831a6bf61519d889e7375bd 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2011-2020 Basis Technology Corp.
+ * Copyright 2011-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,6 +18,8 @@
  */
 package org.sleuthkit.datamodel;
 
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import com.google.common.collect.ImmutableSet;
 import com.google.common.eventbus.EventBus;
 import com.mchange.v2.c3p0.ComboPooledDataSource;
@@ -57,16 +59,21 @@
 import java.util.List;
 import java.util.Map;
 import java.util.MissingResourceException;
+import java.util.Objects;
 import java.util.Properties;
 import java.util.ResourceBundle;
 import java.util.Set;
 import java.util.UUID;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 import java.util.logging.Level;
 import java.util.logging.Logger;
+import java.util.stream.Collectors;
+import org.apache.commons.lang3.StringUtils;
 import org.postgresql.util.PSQLState;
 import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
+import org.sleuthkit.datamodel.BlackboardArtifact.Category;
 import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
 import org.sleuthkit.datamodel.BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE;
 import org.sleuthkit.datamodel.IngestJobInfo.IngestJobStatusType;
@@ -98,7 +105,7 @@ public class SleuthkitCase {
 	 * tsk/auto/tsk_db.h.
 	 */
 	static final CaseDbSchemaVersionNumber CURRENT_DB_SCHEMA_VERSION
-			= new CaseDbSchemaVersionNumber(8, 6);
+			= new CaseDbSchemaVersionNumber(9, 1);
 
 	private static final long BASE_ARTIFACT_ID = Long.MIN_VALUE; // Artifact ids will start at the lowest negative value
 	private static final Logger logger = Logger.getLogger(SleuthkitCase.class.getName());
@@ -176,6 +183,7 @@ public class SleuthkitCase {
 	private static final String CREATION_SCHEMA_MINOR_VERSION_KEY = "CREATION_SCHEMA_MINOR_VERSION";
 
 	private final ConnectionPool connections;
+	private final Object carvedFileDirsLock = new Object();
 	private final Map<Long, VirtualDirectory> rootIdsToCarvedFileDirs = new HashMap<>();
 	private final Map<Long, FileSystem> fileSystemIdMap = new HashMap<>(); // Cache for file system files.
 	private final List<ErrorObserver> sleuthkitCaseErrorObservers = new ArrayList<>();
@@ -192,6 +200,12 @@ public class SleuthkitCase {
 	private Map<String, BlackboardAttribute.Type> typeNameToAttributeTypeMap;
 	private CaseDbSchemaVersionNumber caseDBSchemaCreationVersion;
 
+	// Objects for caching the result of isRootDirectory(). Lock is for visibility only.
+	private final Object rootDirectoryMapLock = new Object();
+	private final Map<RootDirectoryKey, Long> rootDirectoryMap = new HashMap<>();
+	private final Cache<Long, Boolean> isRootDirectoryCache
+			= CacheBuilder.newBuilder().maximumSize(200000).expireAfterAccess(5, TimeUnit.MINUTES).build();
+
 	/*
 	 * First parameter is used to specify the SparseBitSet to use, as object IDs
 	 * can be larger than the max size of a SparseBitSet
@@ -209,7 +223,14 @@ public class SleuthkitCase {
 	private TimelineManager timelineMgr;
 	private Blackboard blackboard;
 	private CaseDbAccessManager dbAccessManager;
+	private FileManager fileManager;
 	private TaggingManager taggingMgr;
+	private ScoringManager scoringManager;
+	private OsAccountRealmManager osAccountRealmManager;
+	private OsAccountManager osAccountManager;
+	private HostManager hostManager;
+	private PersonManager personManager;
+	private HostAddressManager hostAddressManager;
 
 	private final Map<String, Set<Long>> deviceIdToDatasourceObjIdMap = new HashMap<>();
 
@@ -231,12 +252,12 @@ void fireTSKEvent(Object event) {
 	private final Map<Long, Content> frequentlyUsedContentMap = new HashMap<>();
 
 	private Examiner cachedCurrentExaminer = null;
-	
+
 	static {
 		Properties p = new Properties(System.getProperties());
-        p.put("com.mchange.v2.log.MLog", "com.mchange.v2.log.FallbackMLog");
-        p.put("com.mchange.v2.log.FallbackMLog.DEFAULT_CUTOFF_LEVEL", "SEVERE");
-        System.setProperties(p);
+		p.put("com.mchange.v2.log.MLog", "com.mchange.v2.log.FallbackMLog");
+		p.put("com.mchange.v2.log.FallbackMLog.DEFAULT_CUTOFF_LEVEL", "SEVERE");
+		System.setProperties(p);
 	}
 
 	/**
@@ -365,13 +386,13 @@ private void init() throws Exception {
 		typeNameToAttributeTypeMap = new ConcurrentHashMap<>();
 
 		/*
-		 * The following methods need to be called before updateDatabaseSchema
-		 * due to the way that updateFromSchema2toSchema3 was implemented.
+		 * The database schema must be updated before loading blackboard
+		 * artifact/attribute types
 		 */
+		updateDatabaseSchema(null);
 		initBlackboardArtifactTypes();
 		initBlackboardAttributeTypes();
 		initNextArtifactId();
-		updateDatabaseSchema(null);
 
 		try (CaseDbConnection connection = connections.getConnection()) {
 			initIngestModuleTypes(connection);
@@ -384,10 +405,17 @@ private void init() throws Exception {
 		}
 
 		blackboard = new Blackboard(this);
+		fileManager = new FileManager(this);
 		communicationsMgr = new CommunicationsManager(this);
 		timelineMgr = new TimelineManager(this);
 		dbAccessManager = new CaseDbAccessManager(this);
 		taggingMgr = new TaggingManager(this);
+		scoringManager = new ScoringManager(this);
+		osAccountRealmManager = new OsAccountRealmManager(this);
+		osAccountManager = new OsAccountManager(this);
+		hostManager = new HostManager(this);
+		personManager = new PersonManager(this);
+		hostAddressManager = new HostAddressManager(this);
 	}
 
 	/**
@@ -468,6 +496,15 @@ public CommunicationsManager getCommunicationsManager() throws TskCoreException
 	public Blackboard getBlackboard() {
 		return blackboard;
 	}
+	
+	/**
+	 * Gets the file manager for this case.
+	 * 
+	 * @return The per case FileManager object.
+	 */
+	public FileManager getFileManager() {
+		return fileManager;
+	}
 
 	/**
 	 * Gets the communications manager for this case.
@@ -500,6 +537,72 @@ public synchronized TaggingManager getTaggingManager() {
 		return taggingMgr;
 	}
 
+	/**
+	 * Gets the scoring manager for this case.
+	 *
+	 * @return The per case ScoringManager object.
+	 *
+	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 */
+	public ScoringManager getScoringManager() throws TskCoreException {
+		return scoringManager;
+	}
+
+	/**
+	 * Gets the OS account realm manager for this case.
+	 *
+	 * @return The per case OsAccountRealmManager object.
+	 *
+	 * @throws TskCoreException
+	 */
+	public OsAccountRealmManager getOsAccountRealmManager() throws TskCoreException {
+		return osAccountRealmManager;
+	}
+
+	/**
+	 * Gets the OS account manager for this case.
+	 *
+	 * @return The per case OsAccountManager object.
+	 *
+	 * @throws TskCoreException
+	 */
+	public OsAccountManager getOsAccountManager() throws TskCoreException {
+		return osAccountManager;
+	}
+
+	/**
+	 * Gets the Hosts manager for this case.
+	 *
+	 * @return The per case HostManager object.
+	 *
+	 * @throws TskCoreException
+	 */
+	public HostManager getHostManager() throws TskCoreException {
+		return hostManager;
+	}
+
+	/**
+	 * Gets the Person manager for this case.
+	 *
+	 * @return The per case PersonManager object.
+	 *
+	 * @throws TskCoreException
+	 */
+	public PersonManager getPersonManager() throws TskCoreException {
+		return personManager;
+	}
+
+	/**
+	 * Gets the HostAddress manager for this case.
+	 *
+	 * @return The per case HostAddressManager object.
+	 *
+	 * @throws TskCoreException
+	 */
+	public HostAddressManager getHostAddressManager() throws TskCoreException {
+		return hostAddressManager;
+	}
+
 	/**
 	 * Make sure the predefined artifact types are in the artifact types table.
 	 *
@@ -507,23 +610,19 @@ public synchronized TaggingManager getTaggingManager() {
 	 * @throws TskCoreException
 	 */
 	private void initBlackboardArtifactTypes() throws SQLException, TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		Statement statement = null;
-		ResultSet resultSet = null;
 		acquireSingleUserCaseWriteLock();
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = connections.getConnection();
+			Statement statement = connection.createStatement();) {
 			for (ARTIFACT_TYPE type : ARTIFACT_TYPE.values()) {
 				try {
-					statement.execute("INSERT INTO blackboard_artifact_types (artifact_type_id, type_name, display_name) VALUES (" + type.getTypeID() + " , '" + type.getLabel() + "', '" + type.getDisplayName() + "')"); //NON-NLS
+					statement.execute("INSERT INTO blackboard_artifact_types (artifact_type_id, type_name, display_name, category_type) VALUES (" + type.getTypeID() + " , '" + type.getLabel() + "', '" + type.getDisplayName() + "' , " + type.getCategory().getID() + ")"); //NON-NLS
 				} catch (SQLException ex) {
-					resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM blackboard_artifact_types WHERE artifact_type_id = '" + type.getTypeID() + "'"); //NON-NLS
-					resultSet.next();
-					if (resultSet.getLong("count") == 0) {
-						throw ex;
+					try (ResultSet resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM blackboard_artifact_types WHERE artifact_type_id = '" + type.getTypeID() + "'")) { //NON-NLS
+						resultSet.next();
+						if (resultSet.getLong("count") == 0) {
+							throw ex;
+						}
 					}
-					resultSet.close();
-					resultSet = null;
 				}
 				this.typeIdToArtifactTypeMap.put(type.getTypeID(), new BlackboardArtifact.Type(type));
 				this.typeNameToArtifactTypeMap.put(type.getLabel(), new BlackboardArtifact.Type(type));
@@ -533,9 +632,6 @@ private void initBlackboardArtifactTypes() throws SQLException, TskCoreException
 				statement.execute("ALTER SEQUENCE blackboard_artifact_types_artifact_type_id_seq RESTART WITH " + newPrimaryKeyIndex); //NON-NLS
 			}
 		} finally {
-			closeResultSet(resultSet);
-			closeStatement(statement);
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -548,23 +644,19 @@ private void initBlackboardArtifactTypes() throws SQLException, TskCoreException
 	 * @throws TskCoreException
 	 */
 	private void initBlackboardAttributeTypes() throws SQLException, TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		Statement statement = null;
-		ResultSet resultSet = null;
 		acquireSingleUserCaseWriteLock();
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = connections.getConnection();
+			Statement statement = connection.createStatement();) {
 			for (ATTRIBUTE_TYPE type : ATTRIBUTE_TYPE.values()) {
 				try {
 					statement.execute("INSERT INTO blackboard_attribute_types (attribute_type_id, type_name, display_name, value_type) VALUES (" + type.getTypeID() + ", '" + type.getLabel() + "', '" + type.getDisplayName() + "', '" + type.getValueType().getType() + "')"); //NON-NLS
 				} catch (SQLException ex) {
-					resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM blackboard_attribute_types WHERE attribute_type_id = '" + type.getTypeID() + "'"); //NON-NLS
-					resultSet.next();
-					if (resultSet.getLong("count") == 0) {
-						throw ex;
+					try (ResultSet resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM blackboard_attribute_types WHERE attribute_type_id = '" + type.getTypeID() + "'")) { //NON-NLS
+						resultSet.next();
+						if (resultSet.getLong("count") == 0) {
+							throw ex;
+						}
 					}
-					resultSet.close();
-					resultSet = null;
 				}
 				this.typeIdToAttributeTypeMap.put(type.getTypeID(), new BlackboardAttribute.Type(type));
 				this.typeNameToAttributeTypeMap.put(type.getLabel(), new BlackboardAttribute.Type(type));
@@ -574,9 +666,6 @@ private void initBlackboardAttributeTypes() throws SQLException, TskCoreExceptio
 				statement.execute("ALTER SEQUENCE blackboard_attribute_types_attribute_type_id_seq RESTART WITH " + newPrimaryKeyIndex); //NON-NLS
 			}
 		} finally {
-			closeResultSet(resultSet);
-			closeStatement(statement);
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -591,11 +680,12 @@ private void initBlackboardAttributeTypes() throws SQLException, TskCoreExceptio
 	 * @throws TskCoreException
 	 */
 	private void initNextArtifactId() throws SQLException, TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		Statement statement = null;
 		ResultSet resultSet = null;
 		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			statement = connection.createStatement();
 			resultSet = connection.executeQuery(statement, "SELECT MAX(artifact_id) AS max_artifact_id FROM blackboard_artifacts"); //NON-NLS
 			resultSet.next();
@@ -606,7 +696,7 @@ private void initNextArtifactId() throws SQLException, TskCoreException {
 		} finally {
 			closeResultSet(resultSet);
 			closeStatement(statement);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -827,9 +917,7 @@ void addDataSourceToHasChildrenMap() throws TskCoreException {
 		try {
 			populateHasChildrenMap(connection);
 		} finally {
-			if (connection != null) {
-				connection.close();
-			}
+			closeConnection(connection);
 		}
 	}
 
@@ -843,11 +931,12 @@ void addDataSourceToHasChildrenMap() throws TskCoreException {
 	 * @throws Exception
 	 */
 	private void updateDatabaseSchema(String dbPath) throws Exception {
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
 		Statement statement = null;
 		acquireSingleUserCaseWriteLock();
 		try {
+			connection = connections.getConnection();
 			connection.beginTransaction();
 
 			boolean hasMinorVersion = false;
@@ -915,6 +1004,9 @@ private void updateDatabaseSchema(String dbPath) throws Exception {
 				dbSchemaVersion = updateFromSchema8dot3toSchema8dot4(dbSchemaVersion, connection);
 				dbSchemaVersion = updateFromSchema8dot4toSchema8dot5(dbSchemaVersion, connection);
 				dbSchemaVersion = updateFromSchema8dot5toSchema8dot6(dbSchemaVersion, connection);
+				dbSchemaVersion = updateFromSchema8dot6toSchema9dot0(dbSchemaVersion, connection);
+				dbSchemaVersion = updateFromSchema9dot0toSchema9dot1(dbSchemaVersion, connection);
+
 				statement = connection.createStatement();
 				connection.executeUpdate(statement, "UPDATE tsk_db_info SET schema_ver = " + dbSchemaVersion.getMajor() + ", schema_minor_ver = " + dbSchemaVersion.getMinor()); //NON-NLS
 				connection.executeUpdate(statement, "UPDATE tsk_db_info_extended SET value = " + dbSchemaVersion.getMajor() + " WHERE name = '" + SCHEMA_MAJOR_VERSION_KEY + "'"); //NON-NLS
@@ -925,12 +1017,12 @@ private void updateDatabaseSchema(String dbPath) throws Exception {
 
 			connection.commitTransaction();
 		} catch (Exception ex) { // Cannot do exception multi-catch in Java 6, so use catch-all.
-			connection.rollbackTransaction();
+			rollbackTransaction(connection);
 			throw ex;
 		} finally {
 			closeResultSet(resultSet);
 			closeStatement(statement);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -1044,11 +1136,13 @@ private CaseDbSchemaVersionNumber updateFromSchema2toSchema3(CaseDbSchemaVersion
 			return schemaVersion;
 		}
 		Statement statement = null;
+		Statement statement2 = null;
 		Statement updateStatement = null;
 		ResultSet resultSet = null;
 		acquireSingleUserCaseWriteLock();
 		try {
 			statement = connection.createStatement();
+			statement2 = connection.createStatement();
 
 			// Add new tables for tags.
 			statement.execute("CREATE TABLE tag_names (tag_name_id INTEGER PRIMARY KEY, display_name TEXT UNIQUE, description TEXT NOT NULL, color TEXT NOT NULL)"); //NON-NLS
@@ -1093,61 +1187,105 @@ private CaseDbSchemaVersionNumber updateFromSchema2toSchema3(CaseDbSchemaVersion
 						+ " WHERE blackboard_attributes.artifact_id = " + artifactId + ";"); //NON-NLS
 			}
 			resultSet.close();
-			resultSet = null;
 
 			// Convert existing tag artifact and attribute rows to rows in the new tags tables.
-			// TODO: This code depends on prepared statements that could evolve with
-			// time, breaking this upgrade. The code that follows should be rewritten
-			// to do everything with SQL specific to case database schema version 2.
-			HashMap<String, TagName> tagNames = new HashMap<String, TagName>();
-			for (BlackboardArtifact artifact : getBlackboardArtifacts(ARTIFACT_TYPE.TSK_TAG_FILE)) {
-				Content content = getContentById(artifact.getObjectID());
-				String name = ""; //NON-NLS
-				String comment = ""; //NON-NLS
-				ArrayList<BlackboardAttribute> attributes = getBlackboardAttributes(artifact);
-				for (BlackboardAttribute attribute : attributes) {
-					if (attribute.getAttributeTypeID() == ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()) {
-						name = attribute.getValueString();
-					} else if (attribute.getAttributeTypeID() == ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID()) {
-						comment = attribute.getValueString();
-					}
+			Map<String, Long> tagNames = new HashMap<>();
+			long tagNameCounter = 1;
+
+			// Convert file tags.
+			// We need data from the TSK_TAG_NAME and TSK_COMMENT attributes, and need the file size from the tsk_files table.
+			resultSet = statement.executeQuery("SELECT * FROM \n"
+					+ "(SELECT blackboard_artifacts.obj_id AS objId, blackboard_attributes.artifact_id AS artifactId, blackboard_attributes.value_text AS name\n"
+					+ "FROM blackboard_artifacts INNER JOIN blackboard_attributes \n"
+					+ "ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id \n"
+					+ "WHERE blackboard_artifacts.artifact_type_id = "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID()
+					+ " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()
+					+ ") AS tagNames \n"
+					+ "INNER JOIN \n"
+					+ "(SELECT tsk_files.obj_id as objId2, tsk_files.size AS fileSize \n"
+					+ "FROM blackboard_artifacts INNER JOIN tsk_files \n"
+					+ "ON blackboard_artifacts.obj_id = tsk_files.obj_id) AS fileData \n"
+					+ "ON tagNames.objId = fileData.objId2 \n"
+					+ "LEFT JOIN \n"
+					+ "(SELECT value_text AS comment, artifact_id AS tagArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
+					+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID() + ") AS tagComments \n"
+					+ "ON tagNames.artifactId = tagComments.tagArtifactId");
+
+			while (resultSet.next()) {
+				long objId = resultSet.getLong("objId");
+				long fileSize = resultSet.getLong("fileSize");
+				String tagName = resultSet.getString("name");
+				String tagComment = resultSet.getString("comment");
+				if (tagComment == null) {
+					tagComment = "";
 				}
-				if (!name.isEmpty()) {
-					TagName tagName;
-					if (tagNames.containsKey(name)) {
-						tagName = tagNames.get(name);
+
+				if (tagName != null && !tagName.isEmpty()) {
+					// Get the index for the tag name, adding it to the database if needed.
+					long tagNameIndex;
+					if (tagNames.containsKey(tagName)) {
+						tagNameIndex = tagNames.get(tagName);
 					} else {
-						tagName = addTagName(name, "", TagName.HTML_COLOR.NONE); //NON-NLS
-						tagNames.put(name, tagName);
+						statement2.execute("INSERT INTO tag_names (display_name, description, color) "
+								+ "VALUES(\"" + tagName + "\", \"\", \"None\")");
+						tagNames.put(tagName, tagNameCounter);
+						tagNameIndex = tagNameCounter;
+						tagNameCounter++;
 					}
-					addContentTag(content, tagName, comment, 0, content.getSize() - 1);
+
+					statement2.execute("INSERT INTO content_tags (obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset) "
+							+ "VALUES(" + objId + ", " + tagNameIndex + ", \"" + tagComment + "\", 0, " + fileSize + ")");
 				}
 			}
-			for (BlackboardArtifact artifact : getBlackboardArtifacts(ARTIFACT_TYPE.TSK_TAG_ARTIFACT)) {
-				long taggedArtifactId = -1;
-				String name = ""; //NON-NLS
-				String comment = ""; //NON-NLS
-				ArrayList<BlackboardAttribute> attributes = getBlackboardAttributes(artifact);
-				for (BlackboardAttribute attribute : attributes) {
-					if (attribute.getAttributeTypeID() == ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()) {
-						name = attribute.getValueString();
-					} else if (attribute.getAttributeTypeID() == ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID()) {
-						comment = attribute.getValueString();
-					} else if (attribute.getAttributeTypeID() == ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID()) {
-						taggedArtifactId = attribute.getValueLong();
-					}
+			resultSet.close();
+
+			// Convert artifact tags.
+			// We need data from the TSK_TAG_NAME, TSK_TAGGED_ARTIFACT, and TSK_COMMENT attributes.
+			resultSet = statement.executeQuery("SELECT * FROM \n"
+					+ "(SELECT blackboard_artifacts.obj_id AS objId, blackboard_attributes.artifact_id AS artifactId, "
+					+ "blackboard_attributes.value_text AS name\n"
+					+ "FROM blackboard_artifacts INNER JOIN blackboard_attributes \n"
+					+ "ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id \n"
+					+ "WHERE blackboard_artifacts.artifact_type_id = "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID()
+					+ " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAG_NAME.getTypeID()
+					+ ") AS tagNames \n"
+					+ "INNER JOIN \n"
+					+ "(SELECT value_int64 AS taggedArtifactId, artifact_id AS associatedArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
+					+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TAGGED_ARTIFACT.getTypeID() + ") AS tagArtifacts \n"
+					+ "ON tagNames.artifactId = tagArtifacts.associatedArtifactId \n"
+					+ "LEFT JOIN \n"
+					+ "(SELECT value_text AS comment, artifact_id AS commentArtifactId FROM blackboard_attributes WHERE attribute_type_id = "
+					+ BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COMMENT.getTypeID() + ") AS tagComments \n"
+					+ "ON tagNames.artifactId = tagComments.commentArtifactId");
+
+			while (resultSet.next()) {
+				long artifactId = resultSet.getLong("taggedArtifactId");
+				String tagName = resultSet.getString("name");
+				String tagComment = resultSet.getString("comment");
+				if (tagComment == null) {
+					tagComment = "";
 				}
-				if (taggedArtifactId != -1 && !name.isEmpty()) {
-					TagName tagName;
-					if (tagNames.containsKey(name)) {
-						tagName = tagNames.get(name);
+				if (tagName != null && !tagName.isEmpty()) {
+					// Get the index for the tag name, adding it to the database if needed.
+					long tagNameIndex;
+					if (tagNames.containsKey(tagName)) {
+						tagNameIndex = tagNames.get(tagName);
 					} else {
-						tagName = addTagName(name, "", TagName.HTML_COLOR.NONE); //NON-NLS
-						tagNames.put(name, tagName);
+						statement2.execute("INSERT INTO tag_names (display_name, description, color) "
+								+ "VALUES(\"" + tagName + "\", \"\", \"None\")");
+						tagNames.put(tagName, tagNameCounter);
+						tagNameIndex = tagNameCounter;
+						tagNameCounter++;
 					}
-					addBlackboardArtifactTag(getBlackboardArtifact(taggedArtifactId), tagName, comment);
+
+					statement2.execute("INSERT INTO blackboard_artifact_tags (artifact_id, tag_name_id, comment) "
+							+ "VALUES(" + artifactId + ", " + tagNameIndex + ", \"" + tagComment + "\")");
 				}
 			}
+			resultSet.close();
+
 			statement.execute(
 					"DELETE FROM blackboard_attributes WHERE artifact_id IN " //NON-NLS
 					+ "(SELECT artifact_id FROM blackboard_artifacts WHERE artifact_type_id = " //NON-NLS
@@ -1163,7 +1301,7 @@ private CaseDbSchemaVersionNumber updateFromSchema2toSchema3(CaseDbSchemaVersion
 			closeStatement(updateStatement);
 			closeResultSet(resultSet);
 			closeStatement(statement);
-			connection.close();
+			closeStatement(statement2);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -2132,6 +2270,12 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot4toSchema8dot5(CaseDbSchem
 
 			statement.execute("ALTER TABLE tag_names ADD COLUMN rank INTEGER");
 
+			/*
+			 * Update existing Project Vic tag names (from Image Gallery in
+			 * Autopsy) to be part of a Tag Set. NOTE: These names are out of
+			 * date and will not work with the Project VIC Report module. New
+			 * cases will get the new names from Image Gallery.
+			 */
 			String insertStmt = "INSERT INTO tsk_tag_sets (name) VALUES ('Project VIC')";
 			if (getDatabaseType() == DbType.POSTGRESQL) {
 				statement.execute(insertStmt, Statement.RETURN_GENERATED_KEYS);
@@ -2191,7 +2335,7 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot4toSchema8dot5(CaseDbSchem
 			releaseSingleUserCaseWriteLock();
 		}
 	}
-	
+
 	private CaseDbSchemaVersionNumber updateFromSchema8dot5toSchema8dot6(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
 		if (schemaVersion.getMajor() != 8) {
 			return schemaVersion;
@@ -2212,7 +2356,335 @@ private CaseDbSchemaVersionNumber updateFromSchema8dot5toSchema8dot6(CaseDbSchem
 			closeStatement(statement);
 			releaseSingleUserCaseWriteLock();
 		}
-	}	
+	}
+
+	@SuppressWarnings("deprecation")
+	private CaseDbSchemaVersionNumber updateFromSchema8dot6toSchema9dot0(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
+		if (schemaVersion.getMajor() != 8) {
+			return schemaVersion;
+		}
+
+		if (schemaVersion.getMinor() != 6) {
+			return schemaVersion;
+		}
+
+		Statement statement = connection.createStatement();
+		acquireSingleUserCaseWriteLock();
+		try {
+			String dateDataType = "BIGINT";
+			String bigIntDataType = "BIGINT";
+			String blobDataType = "BYTEA";
+			String primaryKeyType = "BIGSERIAL";
+
+			if (this.dbType.equals(DbType.SQLITE)) {
+				dateDataType = "INTEGER";
+				bigIntDataType = "INTEGER";
+				blobDataType = "BLOB";
+				primaryKeyType = "INTEGER";
+			}
+			statement.execute("ALTER TABLE data_source_info ADD COLUMN added_date_time " + dateDataType);
+			statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_settings TEXT");
+			statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_name TEXT");
+			statement.execute("ALTER TABLE data_source_info ADD COLUMN acquisition_tool_version TEXT");
+
+			// Add category type and initialize the types. We use the list of artifact types that
+			// were categorized as analysis results as of the 8.7 update to ensure consistency in
+			// case the built-in types change in a later release.
+			statement.execute("ALTER TABLE blackboard_artifact_types ADD COLUMN category_type INTEGER DEFAULT 0");
+			String analysisTypeObjIdList
+					= BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_FILE.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_TAG_ARTIFACT.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_ARTIFACT_HIT.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_SUSPECTED.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_OBJECT_DETECTED.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_VERIFICATION_FAILED.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_DATA_SOURCE_USAGE.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_USER_CONTENT_SUSPECTED.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_ACCOUNT_TYPE.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_YARA_HIT.getTypeID() + ", "
+					+ BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_CATEGORIZATION.getTypeID();
+			statement.execute("UPDATE blackboard_artifact_types SET category_type = " + BlackboardArtifact.Category.ANALYSIS_RESULT.getID()
+					+ " WHERE artifact_type_id IN (" + analysisTypeObjIdList + ")");
+
+			// Create tsk file attributes table
+			statement.execute("CREATE TABLE tsk_file_attributes (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "obj_id " + bigIntDataType + " NOT NULL, "
+					+ "attribute_type_id " + bigIntDataType + " NOT NULL, "
+					+ "value_type INTEGER NOT NULL, value_byte " + blobDataType + ", "
+					+ "value_text TEXT, value_int32 INTEGER, value_int64 " + bigIntDataType + ", value_double NUMERIC(20, 10), "
+					+ "FOREIGN KEY(obj_id) REFERENCES tsk_files(obj_id) ON DELETE CASCADE, "
+					+ "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))");
+
+			// create analysis results tables
+			statement.execute("CREATE TABLE tsk_analysis_results (artifact_obj_id " + bigIntDataType + " PRIMARY KEY, "
+					+ "conclusion TEXT, "
+					+ "significance INTEGER NOT NULL, "
+					/* method_category was a column in a version of 9.0 that had only limited
+					 * distribution. It was renamed to priority before public release. The 9.1
+					 * upgrade code will add the priority column. This is commented out since it
+					 * was never used. */
+					// + "method_category INTEGER NOT NULL, "
+					+ "configuration TEXT, justification TEXT, "
+					+ "ignore_score INTEGER DEFAULT 0 " // boolean	
+					+ ")");
+
+			statement.execute("CREATE TABLE tsk_aggregate_score( obj_id " + bigIntDataType + " PRIMARY KEY, "
+					+ "data_source_obj_id " + bigIntDataType + ", "
+					+ "significance INTEGER NOT NULL, "
+					// See comment above on why this is commented out
+					// + "method_category INTEGER NOT NULL, "
+					+ "UNIQUE (obj_id),"
+					+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
+					+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE "
+					+ ")");
+
+			// Create person table.
+			statement.execute("CREATE TABLE tsk_persons (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "name TEXT NOT NULL, " // person name
+					+ "UNIQUE(name)) ");
+
+			// Create host table.
+			statement.execute("CREATE TABLE tsk_hosts (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "name TEXT NOT NULL, " // host name
+					+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
+					+ "person_id INTEGER, "
+					+ "merged_into " + bigIntDataType + ", "
+					+ "FOREIGN KEY(person_id) REFERENCES tsk_persons(id) ON DELETE SET NULL, "
+					+ "FOREIGN KEY(merged_into) REFERENCES tsk_hosts(id), "
+					+ "UNIQUE(name)) ");
+
+			// Create OS Account and related tables 
+			statement.execute("CREATE TABLE tsk_os_account_realms (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "realm_name TEXT DEFAULT NULL, " // realm name - for a domain realm, may be null
+					+ "realm_addr TEXT DEFAULT NULL, " // a SID/UID or some other identifier, may be null
+					+ "realm_signature TEXT NOT NULL, " // Signature exists only to prevent duplicates. It is made up of the realm address/name and scope host
+					+ "scope_host_id " + bigIntDataType + " DEFAULT NULL, " // if the realm scope is a single host
+					+ "scope_confidence INTEGER, " // indicates whether we know for sure the realm scope or if we are inferring it
+					+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
+					+ "merged_into " + bigIntDataType + " DEFAULT NULL, "
+					+ "UNIQUE(realm_signature), "
+					+ "FOREIGN KEY(scope_host_id) REFERENCES tsk_hosts(id),"
+					+ "FOREIGN KEY(merged_into) REFERENCES tsk_os_account_realms(id) )");
+
+			// Add host column and create a host for each existing data source.
+			// We will create a host for each device id so that related data sources will 
+			// be associated with the same host.
+			statement.execute("ALTER TABLE data_source_info ADD COLUMN host_id INTEGER REFERENCES tsk_hosts(id)");
+			Statement updateStatement = connection.createStatement();
+			try (ResultSet resultSet = statement.executeQuery("SELECT obj_id, device_id FROM data_source_info")) {
+				Map<String, Long> hostMap = new HashMap<>();
+				long hostIndex = 1;
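+				// The host_id values assigned below assume that tsk_hosts ids are generated sequentially from 1,
+				// matching hostIndex, since the table was just created above and is empty.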
+				while (resultSet.next()) {
+					long objId = resultSet.getLong("obj_id");
+					String deviceId = resultSet.getString("device_id");
+
+					if (!hostMap.containsKey(deviceId)) {
+						String hostName = "Host " + hostIndex;
+						updateStatement.execute("INSERT INTO tsk_hosts (name, db_status) VALUES ('" + hostName + "', 0)");
+						hostMap.put(deviceId, hostIndex);
+						hostIndex++;
+					}
+					updateStatement.execute("UPDATE data_source_info SET host_id = " + hostMap.get(deviceId) + " WHERE obj_id = " + objId);
+				}
+			} finally {
+				closeStatement(updateStatement);
+			}
+
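+			// Create OS accounts table.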
+			statement.execute("CREATE TABLE tsk_os_accounts (os_account_obj_id " + bigIntDataType + " PRIMARY KEY, "
+					+ "login_name TEXT DEFAULT NULL, " // login name, if available, may be null
+					+ "full_name TEXT DEFAULT NULL, " // full name, if available, may be null
+					+ "realm_id " + bigIntDataType + " NOT NULL, " // realm for the account
+					+ "addr TEXT DEFAULT NULL, " // SID/UID, if available
+					+ "signature TEXT NOT NULL, " // This exists only to prevent duplicates. It is either the addr or the login_name, whichever is not null.
+					+ "status INTEGER, " // enabled/disabled/deleted
+					+ "type INTEGER, " // service/interactive
+					+ "created_date " + bigIntDataType + " DEFAULT NULL, "
+					+ "db_status INTEGER DEFAULT 0, " // active/merged/deleted
+					+ "merged_into " + bigIntDataType + " DEFAULT NULL, "
+					+ "UNIQUE(signature, realm_id), "
+					+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
+					+ "FOREIGN KEY(realm_id) REFERENCES tsk_os_account_realms(id),"
+					+ "FOREIGN KEY(merged_into) REFERENCES tsk_os_accounts(os_account_obj_id) )");
+
+			statement.execute("CREATE TABLE tsk_os_account_attributes (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "os_account_obj_id " + bigIntDataType + " NOT NULL, "
+					+ "host_id " + bigIntDataType + ", "
+					+ "source_obj_id " + bigIntDataType + ", "
+					+ "attribute_type_id " + bigIntDataType + " NOT NULL, "
+					+ "value_type INTEGER NOT NULL, "
+					+ "value_byte " + bigIntDataType + ", "
+					+ "value_text TEXT, "
+					+ "value_int32 INTEGER, value_int64 " + bigIntDataType + ", "
+					+ "value_double NUMERIC(20, 10), "
+					+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id), "
+					+ "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id), "
+					+ "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL, "
+					+ "FOREIGN KEY(attribute_type_id) REFERENCES blackboard_attribute_types(attribute_type_id))");
+
+			statement.execute("CREATE TABLE tsk_os_account_instances (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "os_account_obj_id " + bigIntDataType + " NOT NULL, "
+					+ "data_source_obj_id " + bigIntDataType + " NOT NULL, "
+					+ "instance_type INTEGER NOT NULL, " // PerformedActionOn/ReferencedOn
+					+ "UNIQUE(os_account_obj_id, data_source_obj_id), "
+					+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id), "
+					+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE )");
+
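+			// Create data artifacts table; stores the OS account (if any) associated with each data artifact.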
+			statement.execute("CREATE TABLE tsk_data_artifacts ( "
+					+ "artifact_obj_id " + bigIntDataType + " PRIMARY KEY, "
+					+ "os_account_obj_id " + bigIntDataType + ", "
+					+ "FOREIGN KEY(os_account_obj_id) REFERENCES tsk_os_accounts(os_account_obj_id)) ");
+
+			// add owner_uid & os_account_obj_id columns to tsk_files
+			statement.execute("ALTER TABLE tsk_files ADD COLUMN owner_uid TEXT DEFAULT NULL");
+			statement.execute("ALTER TABLE tsk_files ADD COLUMN os_account_obj_id " + bigIntDataType + " DEFAULT NULL REFERENCES tsk_os_accounts(os_account_obj_id) ");
+
+			// create host address tables
+			statement.execute("CREATE TABLE tsk_host_addresses (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "address_type INTEGER NOT NULL, "
+					+ "address TEXT NOT NULL, "
+					+ "UNIQUE(address_type, address)) ");
+
+			statement.execute("CREATE TABLE tsk_host_address_map (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "host_id " + bigIntDataType + " NOT NULL, "
+					+ "addr_obj_id " + bigIntDataType + " NOT NULL, "
+					+ "source_obj_id " + bigIntDataType + ", " // object id of the source where this mapping was found.
+					+ "time " + bigIntDataType + ", " // time at which the mapping existed
+					+ "UNIQUE(host_id, addr_obj_id, time), "
+					+ "FOREIGN KEY(host_id) REFERENCES tsk_hosts(id) ON DELETE CASCADE, "
+					+ "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id), "
+					+ "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL )");
+
+			// stores associations between DNS name and IP address
+			statement.execute("CREATE TABLE tsk_host_address_dns_ip_map (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "dns_address_id " + bigIntDataType + " NOT NULL, "
+					+ "ip_address_id " + bigIntDataType + " NOT NULL, "
+					+ "source_obj_id " + bigIntDataType + ", "
+					+ "time " + bigIntDataType + ", " // time at which the mapping existed
+					+ "UNIQUE(dns_address_id, ip_address_id, time), "
+					+ "FOREIGN KEY(dns_address_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, "
+					+ "FOREIGN KEY(ip_address_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE,"
+					+ "FOREIGN KEY(source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE SET NULL )");
+
+			// maps an address to an artifact that uses it
+			statement.execute("CREATE TABLE tsk_host_address_usage (id " + primaryKeyType + " PRIMARY KEY, "
+					+ "addr_obj_id " + bigIntDataType + " NOT NULL, "
+					+ "obj_id " + bigIntDataType + " NOT NULL, "
+					+ "data_source_obj_id " + bigIntDataType + " NOT NULL, " // data source where the usage was found
+					+ "UNIQUE(addr_obj_id, obj_id), "
+					+ "FOREIGN KEY(addr_obj_id) REFERENCES tsk_host_addresses(id) ON DELETE CASCADE, "
+					+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE )");
+
+			return new CaseDbSchemaVersionNumber(9, 0);
+
+		} finally {
+			closeStatement(statement);
+			releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Upgrades a schema version 9.0 database to a schema version 9.1 database.
+	 *
+	 * @param schemaVersion The current schema version of the database.
+	 * @param connection    The database connection to use.
+	 *
+	 * @return The new database schema version.
+	 *
+	 * @throws SQLException     If there is an error upgrading the database.
+	 * @throws TskCoreException If there is an error upgrading the database.
+	 */
+	private CaseDbSchemaVersionNumber updateFromSchema9dot0toSchema9dot1(CaseDbSchemaVersionNumber schemaVersion, CaseDbConnection connection) throws SQLException, TskCoreException {
+		if (schemaVersion.getMajor() != 9) {
+			return schemaVersion;
+		}
+
+		if (schemaVersion.getMinor() != 0) {
+			return schemaVersion;
+		}
+
+		Statement statement = connection.createStatement();
+		ResultSet results = null;
+		acquireSingleUserCaseWriteLock();
+		try {
+			// The 9.0 schema contained method_category columns that were renamed to priority.
+			switch (getDatabaseType()) {
+				case POSTGRESQL:
+					// Check if the misnamed column is present. We'll assume here that the column will exist
+					// in both tables if present in one.
+					results = statement.executeQuery("SELECT column_name FROM information_schema.columns "
+							+ "WHERE table_name='tsk_analysis_results' and column_name='method_category'");
+					if (results.next()) {
+						// In PostgreSQL we can delete the column
+						statement.execute("ALTER TABLE tsk_analysis_results "
+								+ "DROP COLUMN method_category");
+						statement.execute("ALTER TABLE tsk_aggregate_score "
+								+ "DROP COLUMN method_category");
+					}
+					break;
+				case SQLITE:
+					// Check if the misnamed column is present. We'll assume here that the column will exist
+					// in both tables if present in one.
+					boolean hasMisnamedColumn = false;
+					results = statement.executeQuery("pragma table_info('tsk_analysis_results')");
+					while (results.next()) {
+						if (results.getString("name") != null && results.getString("name").equals("method_category")) {
+							hasMisnamedColumn = true;
+							break;
+						}
+					}
+
+					if (hasMisnamedColumn) {
+						// Since we can't rename the column we'll need to make a new table and copy the data.
+						// We'll add the priority column later.
+						statement.execute("CREATE TABLE temp_tsk_analysis_results (artifact_obj_id INTEGER PRIMARY KEY, "
+								+ "conclusion TEXT, "
+								+ "significance INTEGER NOT NULL, "
+								+ "configuration TEXT, justification TEXT, "
+								+ "ignore_score INTEGER DEFAULT 0 " // boolean	
+								+ ")");
+						statement.execute("CREATE TABLE temp_tsk_aggregate_score( obj_id INTEGER PRIMARY KEY, "
+								+ "data_source_obj_id INTEGER, "
+								+ "significance INTEGER NOT NULL, "
+								+ "UNIQUE (obj_id),"
+								+ "FOREIGN KEY(obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE, "
+								+ "FOREIGN KEY(data_source_obj_id) REFERENCES tsk_objects(obj_id) ON DELETE CASCADE "
+								+ ")");
+
+						// Copy the data
+						statement.execute("INSERT INTO temp_tsk_analysis_results(artifact_obj_id, "
+								+ "conclusion, justification, significance, configuration, ignore_score) "
+								+ "SELECT artifact_obj_id, conclusion, justification, significance, configuration, ignore_score FROM tsk_analysis_results");
+						statement.execute("INSERT INTO temp_tsk_aggregate_score(obj_id, "
+								+ "data_source_obj_id, significance) "
+								+ "SELECT obj_id, data_source_obj_id, significance FROM tsk_aggregate_score");
+
+						// Drop the old tables
+						statement.execute("DROP TABLE tsk_analysis_results");
+						statement.execute("DROP TABLE tsk_aggregate_score");
+
+						// Rename the new tables
+						statement.execute("ALTER TABLE temp_tsk_analysis_results RENAME TO tsk_analysis_results");
+						statement.execute("ALTER TABLE temp_tsk_aggregate_score RENAME TO tsk_aggregate_score");
+
+					}
+					break;
+				default:
+					throw new TskCoreException("Unsupported database type: " + getDatabaseType().toString());
+			}
+
+			// add an index on tsk_file_attributes table.
+			statement.execute("CREATE INDEX tsk_file_attributes_obj_id ON tsk_file_attributes(obj_id)");
+			
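+			// Add the priority column (the rename target for method_category) to both tables.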
+			statement.execute("ALTER TABLE tsk_analysis_results ADD COLUMN priority INTEGER NOT NULL DEFAULT " + Score.Priority.NORMAL.getId());
+			statement.execute("ALTER TABLE tsk_aggregate_score ADD COLUMN priority INTEGER NOT NULL DEFAULT " + Score.Priority.NORMAL.getId());
+			
+			statement.execute("UPDATE blackboard_artifact_types SET category_type = 1 WHERE artifact_type_id = 16");
+			
+			return new CaseDbSchemaVersionNumber(9, 1);
+		} finally {
+			closeResultSet(results);
+			closeStatement(statement);
+			releaseSingleUserCaseWriteLock();
+		}
+	}
 
 	/**
 	 * Inserts a row for the given account type in account_types table, if one
@@ -2325,17 +2797,17 @@ public String getBackupDatabasePath() {
 	 * that is returned can be passed to methods that take a CaseDbTransaction.
 	 * The caller is responsible for calling either commit() or rollback() on
 	 * the transaction object.
-	 * 
+	 *
 	 * Note that this beginning the transaction also acquires the single user
-	 * case write lock, which will be automatically released when the transaction
-	 * is closed.
+	 * case write lock, which will be automatically released when the
+	 * transaction is closed.
 	 *
 	 * @return A CaseDbTransaction object.
 	 *
 	 * @throws TskCoreException
 	 */
 	public CaseDbTransaction beginTransaction() throws TskCoreException {
-		return new CaseDbTransaction(this, connections.getConnection());
+		return new CaseDbTransaction(this);
 	}
 
 	/**
@@ -2609,10 +3081,11 @@ public Examiner getCurrentExaminer() throws TskCoreException {
 			throw new TskCoreException("Failed to determine logged in user name.");
 		}
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
 		ResultSet resultSet = null;
+		CaseDbConnection connection = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_EXAMINER_BY_LOGIN_NAME);
 			statement.clearParameters();
 			statement.setString(1, loginName);
@@ -2628,7 +3101,7 @@ public Examiner getCurrentExaminer() throws TskCoreException {
 			throw new TskCoreException("Error getting examaminer for name = " + loginName, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 
@@ -2645,10 +3118,11 @@ public Examiner getCurrentExaminer() throws TskCoreException {
 	 */
 	Examiner getExaminerById(long id) throws TskCoreException {
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_EXAMINER_BY_ID);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -2662,7 +3136,7 @@ Examiner getExaminerById(long id) throws TskCoreException {
 			throw new TskCoreException("Error getting examaminer for id = " + id, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -2697,11 +3171,12 @@ public AddImageProcess makeAddImageProcess(String timeZone, boolean addUnallocSp
 	 * @throws TskCoreException
 	 */
 	public List<Content> getRootObjects() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT obj_id, type FROM tsk_objects " //NON-NLS
 					+ "WHERE par_obj_id IS NULL"); //NON-NLS
@@ -2728,6 +3203,12 @@ public List<Content> getRootObjects() throws TskCoreException {
 							break;
 						case REPORT:
 							break;
+						case OS_ACCOUNT:
+							break;
+						case HOST_ADDRESS:
+							break;
+						case UNSUPPORTED:
+							break;
 						default:
 							throw new TskCoreException("Parentless object has wrong type to be a root: " + i.type);
 					}
@@ -2739,7 +3220,7 @@ public List<Content> getRootObjects() throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -2763,11 +3244,12 @@ List<Long> getDataSourceObjIds(String deviceId) throws TskCoreException {
 				return new ArrayList<Long>(deviceIdToDatasourceObjIdMap.get(deviceId));
 			}
 
-			CaseDbConnection connection = connections.getConnection();
-			acquireSingleUserCaseReadLock();
+			CaseDbConnection connection = null;
 			Statement s = null;
 			ResultSet rs = null;
+			acquireSingleUserCaseReadLock();
 			try {
+				connection = connections.getConnection();
 				s = connection.createStatement();
 				rs = connection.executeQuery(s, "SELECT obj_id FROM data_source_info WHERE device_id = '" + deviceId + "'"); //NON-NLS
 				List<Long> dataSourceObjIds = new ArrayList<Long>();
@@ -2788,7 +3270,7 @@ List<Long> getDataSourceObjIds(String deviceId) throws TskCoreException {
 			} finally {
 				closeResultSet(rs);
 				closeStatement(s);
-				connection.close();
+				closeConnection(connection);
 				releaseSingleUserCaseReadLock();
 			}
 		}
@@ -2811,13 +3293,14 @@ List<Long> getDataSourceObjIds(String deviceId) throws TskCoreException {
 	 * @throws TskCoreException if there is a problem getting the data sources.
 	 */
 	public List<DataSource> getDataSources() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement statement = null;
 		ResultSet resultSet = null;
 		Statement statement2 = null;
 		ResultSet resultSet2 = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			statement = connection.createStatement();
 			statement2 = connection.createStatement();
 			resultSet = connection.executeQuery(statement,
@@ -2890,7 +3373,7 @@ public List<DataSource> getDataSources() throws TskCoreException {
 			closeStatement(statement);
 			closeResultSet(resultSet2);
 			closeStatement(statement2);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -2916,13 +3399,14 @@ public List<DataSource> getDataSources() throws TskCoreException {
 	 */
 	public DataSource getDataSource(long objectId) throws TskDataException, TskCoreException {
 		DataSource dataSource = null;
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement statement = null;
 		ResultSet resultSet = null;
 		Statement statement2 = null;
 		ResultSet resultSet2 = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			statement = connection.createStatement();
 			statement2 = connection.createStatement();
 			resultSet = connection.executeQuery(statement,
@@ -2963,7 +3447,7 @@ public DataSource getDataSource(long objectId) throws TskDataException, TskCoreE
 					String sha256 = resultSet.getString("sha256");
 					String name = resultSet.getString("display_name");
 
-					List<String> imagePaths = getImagePathsById(objectId);
+					List<String> imagePaths = getImagePathsById(objectId, connection);
 					if (name == null) {
 						if (imagePaths.size() > 0) {
 							String path = imagePaths.get(0);
@@ -2986,7 +3470,7 @@ public DataSource getDataSource(long objectId) throws TskDataException, TskCoreE
 			closeStatement(statement);
 			closeResultSet(resultSet2);
 			closeStatement(statement2);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 
@@ -3018,10 +3502,12 @@ public ArrayList<BlackboardArtifact> getBlackboardArtifacts(int artifactTypeID)
 	 * @throws TskCoreException
 	 */
 	public long getBlackboardArtifactsCount(long objId) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE obj_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_FROM_SOURCE);
 			statement.clearParameters();
@@ -3036,7 +3522,7 @@ public long getBlackboardArtifactsCount(long objId) throws TskCoreException {
 			throw new TskCoreException("Error getting number of blackboard artifacts by content", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3052,10 +3538,12 @@ public long getBlackboardArtifactsCount(long objId) throws TskCoreException {
 	 * @throws TskCoreException
 	 */
 	public long getBlackboardArtifactsTypeCount(int artifactTypeID) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE artifact_type_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_OF_TYPE);
 			statement.clearParameters();
@@ -3070,7 +3558,7 @@ public long getBlackboardArtifactsTypeCount(int artifactTypeID) throws TskCoreEx
 			throw new TskCoreException("Error getting number of blackboard artifacts by type", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3087,10 +3575,12 @@ public long getBlackboardArtifactsTypeCount(int artifactTypeID) throws TskCoreEx
 	 * @throws TskCoreException
 	 */
 	public long getBlackboardArtifactsTypeCount(int artifactTypeID, long dataSourceID) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE artifact_type_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_OF_TYPE_BY_DATA_SOURCE);
 			statement.clearParameters();
@@ -3106,7 +3596,7 @@ public long getBlackboardArtifactsTypeCount(int artifactTypeID, long dataSourceI
 			throw new TskCoreException(String.format("Error getting number of blackboard artifacts by type (%d) and data source (%d)", artifactTypeID, dataSourceID), ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3126,11 +3616,12 @@ public long getBlackboardArtifactsTypeCount(int artifactTypeID, long dataSourceI
 	 *                          queried
 	 */
 	public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, String value) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
 					+ "arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
@@ -3144,7 +3635,8 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());	 //NON-NLS
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -3154,7 +3646,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3181,11 +3673,12 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 		if (startsWith == false) {
 			valSubStr += "%"; //NON-NLS
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
 					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, " //NON-NLS
@@ -3199,7 +3692,8 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -3209,7 +3703,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3229,11 +3723,12 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 	 *                          queried
 	 */
 	public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, int value) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
 					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
@@ -3247,7 +3742,8 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -3257,7 +3753,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3277,11 +3773,12 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 	 *                          queried
 	 */
 	public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, long value) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
 					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
@@ -3295,7 +3792,8 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -3305,7 +3803,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3325,11 +3823,12 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 	 *                          queried
 	 */
 	public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, double value) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
 					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
@@ -3343,7 +3842,8 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -3353,7 +3853,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3373,11 +3873,12 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 	 *                          queried
 	 */
 	public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRIBUTE_TYPE attrType, byte value) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
 					+ " arts.obj_id AS obj_id, arts.artifact_obj_id AS artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
@@ -3391,7 +3892,8 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -3401,7 +3903,7 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3414,17 +3916,19 @@ public List<BlackboardArtifact> getBlackboardArtifacts(BlackboardAttribute.ATTRI
 	 * @throws TskCoreException when there is an error getting the types
 	 */
 	public Iterable<BlackboardArtifact.Type> getArtifactTypes() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
-			rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name FROM blackboard_artifact_types"); //NON-NLS
+			rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name, category_type FROM blackboard_artifact_types"); //NON-NLS
 			ArrayList<BlackboardArtifact.Type> artifactTypes = new ArrayList<BlackboardArtifact.Type>();
 			while (rs.next()) {
 				artifactTypes.add(new BlackboardArtifact.Type(rs.getInt("artifact_type_id"),
-						rs.getString("type_name"), rs.getString("display_name")));
+						rs.getString("type_name"), rs.getString("display_name"), 
+						BlackboardArtifact.Category.fromID(rs.getInt("category_type"))));
 			}
 			return artifactTypes;
 		} catch (SQLException ex) {
@@ -3432,7 +3936,7 @@ public Iterable<BlackboardArtifact.Type> getArtifactTypes() throws TskCoreExcept
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3455,11 +3959,12 @@ public ArrayList<BlackboardArtifact.ARTIFACT_TYPE> getBlackboardArtifactTypesInU
 		}
 		String query = "SELECT DISTINCT artifact_type_id FROM blackboard_artifacts "
 				+ "WHERE artifact_type_id IN (" + typeIdList + ")";
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, query);
 			ArrayList<BlackboardArtifact.ARTIFACT_TYPE> usedArts = new ArrayList<BlackboardArtifact.ARTIFACT_TYPE>();
@@ -3472,7 +3977,7 @@ public ArrayList<BlackboardArtifact.ARTIFACT_TYPE> getBlackboardArtifactTypesInU
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3488,22 +3993,26 @@ public ArrayList<BlackboardArtifact.ARTIFACT_TYPE> getBlackboardArtifactTypesInU
 	 *                          within tsk core
 	 */
 	public List<BlackboardArtifact.Type> getArtifactTypesInUse() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s,
 					"SELECT DISTINCT arts.artifact_type_id AS artifact_type_id, "
-					+ "types.type_name AS type_name, types.display_name AS display_name "
+					+ "types.type_name AS type_name, "
+					+ "types.display_name AS display_name, "
+					+ "types.category_type AS category_type "
 					+ "FROM blackboard_artifact_types AS types "
 					+ "INNER JOIN blackboard_artifacts AS arts "
 					+ "ON arts.artifact_type_id = types.artifact_type_id"); //NON-NLS
 			List<BlackboardArtifact.Type> uniqueArtifactTypes = new ArrayList<BlackboardArtifact.Type>();
 			while (rs.next()) {
 				uniqueArtifactTypes.add(new BlackboardArtifact.Type(rs.getInt("artifact_type_id"),
-						rs.getString("type_name"), rs.getString("display_name")));
+						rs.getString("type_name"), rs.getString("display_name"), 
+						BlackboardArtifact.Category.fromID(rs.getInt("category_type"))));
 			}
 			return uniqueArtifactTypes;
 		} catch (SQLException ex) {
@@ -3511,7 +4020,7 @@ public List<BlackboardArtifact.Type> getArtifactTypesInUse() throws TskCoreExcep
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3524,11 +4033,12 @@ public List<BlackboardArtifact.Type> getArtifactTypesInUse() throws TskCoreExcep
 	 * @throws TskCoreException when there is an error getting the types
 	 */
 	public List<BlackboardAttribute.Type> getAttributeTypes() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT attribute_type_id, type_name, display_name, value_type FROM blackboard_attribute_types"); //NON-NLS
 			ArrayList<BlackboardAttribute.Type> attribute_types = new ArrayList<BlackboardAttribute.Type>();
@@ -3542,7 +4052,7 @@ public List<BlackboardAttribute.Type> getAttributeTypes() throws TskCoreExceptio
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3559,11 +4069,12 @@ public List<BlackboardAttribute.Type> getAttributeTypes() throws TskCoreExceptio
 	 *                          within TSK core
 	 */
 	public int getBlackboardAttributeTypesCount() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM blackboard_attribute_types"); //NON-NLS
 			int count = 0;
@@ -3576,7 +4087,7 @@ public int getBlackboardAttributeTypesCount() throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3594,11 +4105,13 @@ public int getBlackboardAttributeTypesCount() throws TskCoreException {
 	 *                          database.
 	 */
 	ArrayList<BlackboardArtifact> getArtifactsHelper(String whereClause) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
+		Statement statement = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
-			Statement statement = connection.createStatement();
+			connection = connections.getConnection();
+			statement = connection.createStatement();
 			String query = "SELECT blackboard_artifacts.artifact_id AS artifact_id, "
 					+ "blackboard_artifacts.obj_id AS obj_id, "
 					+ "blackboard_artifacts.artifact_obj_id AS artifact_obj_id, "
@@ -3614,7 +4127,8 @@ ArrayList<BlackboardArtifact> getArtifactsHelper(String whereClause) throws TskC
 			rs = connection.executeQuery(statement, query);
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -3623,7 +4137,8 @@ ArrayList<BlackboardArtifact> getArtifactsHelper(String whereClause) throws TskC
 			throw new TskCoreException("Error getting or creating a blackboard artifact", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeStatement(statement);
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3641,10 +4156,12 @@ ArrayList<BlackboardArtifact> getArtifactsHelper(String whereClause) throws TskC
 	 *                          within TSK core
 	 */
 	private long getArtifactsCountHelper(int artifactTypeID, long obj_id) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT COUNT(*) AS count FROM blackboard_artifacts WHERE obj_id = ? AND artifact_type_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_BY_SOURCE_AND_TYPE);
 			statement.clearParameters();
@@ -3660,7 +4177,7 @@ private long getArtifactsCountHelper(int artifactTypeID, long obj_id) throws Tsk
 			throw new TskCoreException("Error getting blackboard artifact count", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3809,11 +4326,12 @@ public ArrayList<BlackboardArtifact> getBlackboardArtifacts(ARTIFACT_TYPE artifa
 	 *                          within TSK core
 	 */
 	public List<BlackboardArtifact> getBlackboardArtifacts(ARTIFACT_TYPE artifactType, BlackboardAttribute.ATTRIBUTE_TYPE attrType, String value) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT DISTINCT arts.artifact_id AS artifact_id, " //NON-NLS
 					+ "arts.obj_id AS obj_id, arts.artifact_obj_id as artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
@@ -3828,7 +4346,8 @@ public List<BlackboardArtifact> getBlackboardArtifacts(ARTIFACT_TYPE artifactTyp
 					+ " AND arts.review_status_id !=" + BlackboardArtifact.ReviewStatus.REJECTED.getID());
 			ArrayList<BlackboardArtifact> artifacts = new ArrayList<BlackboardArtifact>();
 			while (rs.next()) {
-				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				artifacts.add(new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id"))));
 			}
@@ -3838,15 +4357,16 @@ public List<BlackboardArtifact> getBlackboardArtifacts(ARTIFACT_TYPE artifactTyp
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
 
 	/**
-	 * Get the blackboard artifact with the given artifact id
+	 * Get the blackboard artifact with the given artifact id (artifact_id in
+	 * blackboard_artifacts)
 	 *
-	 * @param artifactID artifact ID
+	 * @param artifactID artifact ID (artifact_id column)
 	 *
 	 * @return blackboard artifact
 	 *
@@ -3854,11 +4374,12 @@ public List<BlackboardArtifact> getBlackboardArtifacts(ARTIFACT_TYPE artifactTyp
 	 *                          within TSK core
 	 */
 	public BlackboardArtifact getBlackboardArtifact(long artifactID) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
+		Statement s = null;
 		ResultSet rs = null;
-		Statement s;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT arts.artifact_id AS artifact_id, "
 					+ "arts.obj_id AS obj_id, arts.artifact_obj_id as artifact_obj_id, arts.data_source_obj_id AS data_source_obj_id, arts.artifact_type_id AS artifact_type_id, "
@@ -3868,7 +4389,8 @@ public BlackboardArtifact getBlackboardArtifact(long artifactID) throws TskCoreE
 					+ "WHERE arts.artifact_id = " + artifactID
 					+ " AND arts.artifact_type_id = types.artifact_type_id");
 			if (rs.next()) {
-				return new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				return new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						rs.getInt("artifact_type_id"), rs.getString("type_name"), rs.getString("display_name"),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id")));
 			} else {
@@ -3883,7 +4405,7 @@ public BlackboardArtifact getBlackboardArtifact(long artifactID) throws TskCoreE
 			throw new TskCoreException("Error getting a blackboard artifact. " + ex.getMessage(), ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -3897,14 +4419,12 @@ public BlackboardArtifact getBlackboardArtifact(long artifactID) throws TskCoreE
 	 * @throws TskCoreException thrown if a critical error occurs.
 	 */
 	public void addBlackboardAttribute(BlackboardAttribute attr, int artifactTypeId) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			addBlackBoardAttribute(attr, artifactTypeId, connection);
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error adding blackboard attribute " + attr.toString(), ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -3919,24 +4439,25 @@ public void addBlackboardAttribute(BlackboardAttribute attr, int artifactTypeId)
 	 * @throws TskCoreException thrown if a critical error occurs.
 	 */
 	public void addBlackboardAttributes(Collection<BlackboardAttribute> attributes, int artifactTypeId) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		acquireSingleUserCaseWriteLock();
 		try {
+			connection = connections.getConnection();
 			connection.beginTransaction();
 			for (final BlackboardAttribute attr : attributes) {
 				addBlackBoardAttribute(attr, artifactTypeId, connection);
 			}
 			connection.commitTransaction();
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			rollbackTransaction(connection);
 			throw new TskCoreException("Error adding blackboard attributes", ex);
 		} finally {
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
 
-	private void addBlackBoardAttribute(BlackboardAttribute attr, int artifactTypeId, CaseDbConnection connection) throws SQLException, TskCoreException {
+	void addBlackBoardAttribute(BlackboardAttribute attr, int artifactTypeId, CaseDbConnection connection) throws SQLException, TskCoreException {
 		PreparedStatement statement;
 		switch (attr.getAttributeType().getValueType()) {
 			case STRING:
@@ -3982,6 +4503,57 @@ private void addBlackBoardAttribute(BlackboardAttribute attr, int artifactTypeId
 		connection.executeUpdate(statement);
 	}
 
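+	/**
+	 * Adds a file attribute to the tsk_file_attributes table and sets the
+	 * generated row id on the given attribute object.
+	 *
+	 * @param attr       The file attribute to add.
+	 * @param connection The case database connection to use.
+	 *
+	 * @throws SQLException     If there is an error updating the database.
+	 * @throws TskCoreException If the generated key is not returned.
+	 */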
+	void addFileAttribute(Attribute attr, CaseDbConnection connection) throws SQLException, TskCoreException {
+		PreparedStatement statement;
+		statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE_ATTRIBUTE, Statement.RETURN_GENERATED_KEYS);
+		statement.clearParameters();
+
+		statement.setLong(1, attr.getAttributeParentId());
+		statement.setInt(2, attr.getAttributeType().getTypeID());
+		statement.setLong(3, attr.getAttributeType().getValueType().getType());
+
+		if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.BYTE) {
+			statement.setBytes(4, attr.getValueBytes());
+		} else {
+			statement.setBytes(4, null);
+		}
+
+		if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING
+				|| attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.JSON) {
+			statement.setString(5, attr.getValueString());
+		} else {
+			statement.setString(5, null);
+		}
+		if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.INTEGER) {
+			statement.setInt(6, attr.getValueInt());
+		} else {
+			statement.setNull(6, java.sql.Types.INTEGER);
+		}
+
+		if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME
+				|| attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.LONG) {
+			statement.setLong(7, attr.getValueLong());
+		} else {
+			statement.setNull(7, java.sql.Types.BIGINT);
+		}
+
+		if (attr.getAttributeType().getValueType() == TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DOUBLE) {
+			statement.setDouble(8, attr.getValueDouble());
+		} else {
+			statement.setNull(8, java.sql.Types.DOUBLE);
+		}
+
+		connection.executeUpdate(statement);
+		try (ResultSet resultSet = statement.getGeneratedKeys()) {
+			if (!resultSet.next()) {
+				throw new TskCoreException(String.format("Failed to insert file attribute "
+						+ "with id=%d. The expected key was not generated", attr.getId()));
+			}
+
+			attr.setId(resultSet.getLong(1));
+		}
+	}
+
 	/**
 	 * Adds a source name to the source column of one or more rows in the
 	 * blackboard attributes table. The source name will be added to a CSV list
@@ -4003,13 +4575,14 @@ String addSourceToArtifactAttribute(BlackboardAttribute attr, String source) thr
 		if (null == source || source.isEmpty()) {
 			throw new TskCoreException("Attempt to add null or empty source module name to artifact attribute");
 		}
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		acquireSingleUserCaseWriteLock();
 		Statement queryStmt = null;
 		Statement updateStmt = null;
 		ResultSet result = null;
 		String newSources = "";
 		try {
+			connection = connections.getConnection();
 			connection.beginTransaction();
 			String valueClause = "";
 			BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType = attr.getAttributeType().getValueType();
@@ -4090,13 +4663,13 @@ String addSourceToArtifactAttribute(BlackboardAttribute attr, String source) thr
 			connection.commitTransaction();
 			return newSources;
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			rollbackTransaction(connection);
 			throw new TskCoreException(String.format("Error adding source module to attribute %s", attr.getDisplayString()), ex);
 		} finally {
 			closeResultSet(result);
 			closeStatement(updateStmt);
 			closeStatement(queryStmt);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -4116,11 +4689,12 @@ String addSourceToArtifactAttribute(BlackboardAttribute attr, String source) thr
 	 *                          in the system
 	 */
 	public BlackboardAttribute.Type addArtifactAttributeType(String attrTypeString, TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE valueType, String displayName) throws TskCoreException, TskDataException {
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		acquireSingleUserCaseWriteLock();
 		Statement s = null;
 		ResultSet rs = null;
 		try {
+			connection = connections.getConnection();
 			connection.beginTransaction();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT attribute_type_id FROM blackboard_attribute_types WHERE type_name = '" + attrTypeString + "'"); //NON-NLS
@@ -4147,12 +4721,12 @@ public BlackboardAttribute.Type addArtifactAttributeType(String attrTypeString,
 			}
 
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			rollbackTransaction(connection);
 			throw new TskCoreException("Error adding attribute type", ex);
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -4171,11 +4745,12 @@ public BlackboardAttribute.Type getAttributeType(String attrTypeName) throws Tsk
 		if (this.typeNameToAttributeTypeMap.containsKey(attrTypeName)) {
 			return this.typeNameToAttributeTypeMap.get(attrTypeName);
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT attribute_type_id, type_name, display_name, value_type FROM blackboard_attribute_types WHERE type_name = '" + attrTypeName + "'"); //NON-NLS
 			BlackboardAttribute.Type type = null;
@@ -4191,7 +4766,7 @@ public BlackboardAttribute.Type getAttributeType(String attrTypeName) throws Tsk
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4206,15 +4781,16 @@ public BlackboardAttribute.Type getAttributeType(String attrTypeName) throws Tsk
 	 * @throws TskCoreException If an error occurs accessing the case database.
 	 *
 	 */
-	private BlackboardAttribute.Type getAttributeType(int typeID) throws TskCoreException {
+	BlackboardAttribute.Type getAttributeType(int typeID) throws TskCoreException {
 		if (this.typeIdToAttributeTypeMap.containsKey(typeID)) {
 			return this.typeIdToAttributeTypeMap.get(typeID);
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT attribute_type_id, type_name, display_name, value_type FROM blackboard_attribute_types WHERE attribute_type_id = " + typeID + ""); //NON-NLS
 			BlackboardAttribute.Type type = null;
@@ -4230,7 +4806,7 @@ private BlackboardAttribute.Type getAttributeType(int typeID) throws TskCoreExce
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4249,17 +4825,19 @@ public BlackboardArtifact.Type getArtifactType(String artTypeName) throws TskCor
 		if (this.typeNameToArtifactTypeMap.containsKey(artTypeName)) {
 			return this.typeNameToArtifactTypeMap.get(artTypeName);
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
-			rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name FROM blackboard_artifact_types WHERE type_name = '" + artTypeName + "'"); //NON-NLS
+			rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name, category_type FROM blackboard_artifact_types WHERE type_name = '" + artTypeName + "'"); //NON-NLS
 			BlackboardArtifact.Type type = null;
 			if (rs.next()) {
 				type = new BlackboardArtifact.Type(rs.getInt("artifact_type_id"),
-						rs.getString("type_name"), rs.getString("display_name"));
+						rs.getString("type_name"), rs.getString("display_name"), 
+						BlackboardArtifact.Category.fromID(rs.getInt("category_type")));
 				this.typeIdToArtifactTypeMap.put(type.getTypeID(), type);
 				this.typeNameToArtifactTypeMap.put(artTypeName, type);
 			}
@@ -4269,7 +4847,7 @@ public BlackboardArtifact.Type getArtifactType(String artTypeName) throws TskCor
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4279,36 +4857,41 @@ public BlackboardArtifact.Type getArtifactType(String artTypeName) throws TskCor
 	 *
 	 * @param artTypeId An artifact type id.
 	 *
-	 * @return An artifact type or null if the artifact type does not exist.
+	 * @return The artifact type.
 	 *
-	 * @throws TskCoreException If an error occurs accessing the case database.
+	 * @throws TskCoreException If an error occurs accessing the case database
+	 *                          or no artifact type is found for the given id.
 	 *
 	 */
 	BlackboardArtifact.Type getArtifactType(int artTypeId) throws TskCoreException {
 		if (this.typeIdToArtifactTypeMap.containsKey(artTypeId)) {
 			return typeIdToArtifactTypeMap.get(artTypeId);
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
-			rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name FROM blackboard_artifact_types WHERE artifact_type_id = " + artTypeId + ""); //NON-NLS
+			rs = connection.executeQuery(s, "SELECT artifact_type_id, type_name, display_name, category_type FROM blackboard_artifact_types WHERE artifact_type_id = " + artTypeId + ""); //NON-NLS
 			BlackboardArtifact.Type type = null;
 			if (rs.next()) {
 				type = new BlackboardArtifact.Type(rs.getInt("artifact_type_id"),
-						rs.getString("type_name"), rs.getString("display_name"));
+						rs.getString("type_name"), rs.getString("display_name"), 
+						BlackboardArtifact.Category.fromID(rs.getInt("category_type")));
 				this.typeIdToArtifactTypeMap.put(artTypeId, type);
 				this.typeNameToArtifactTypeMap.put(type.getTypeName(), type);
+				return type;
+			} else {
+				throw new TskCoreException("No artifact type found matching id: " + artTypeId);
 			}
-			return type;
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting artifact type from the database", ex);
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4316,6 +4899,9 @@ BlackboardArtifact.Type getArtifactType(int artTypeId) throws TskCoreException {
 	/**
 	 * Add an artifact type with the given name. Will return an artifact Type.
 	 *
+	 * The new artifact type is added with the category
+	 * DATA_ARTIFACT.
+	 *
 	 * @param artifactTypeName System (unique) name of artifact
 	 * @param displayName      Display (non-unique) name of artifact
 	 *
@@ -4326,11 +4912,32 @@ BlackboardArtifact.Type getArtifactType(int artTypeId) throws TskCoreException {
 	 *                          within tsk core
 	 */
 	public BlackboardArtifact.Type addBlackboardArtifactType(String artifactTypeName, String displayName) throws TskCoreException, TskDataException {
-		CaseDbConnection connection = connections.getConnection();
+
+		return addBlackboardArtifactType(artifactTypeName, displayName, BlackboardArtifact.Category.DATA_ARTIFACT);
+	}
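+	// A minimal usage sketch (the type name, display name, and the `caseDb`
+	// variable are illustrative, not part of this patch): custom types added
+	// through this public overload are stored with the DATA_ARTIFACT category,
+	// while the package-private overload below takes an explicit category.
+	//
+	//   BlackboardArtifact.Type customType =
+	//           caseDb.addBlackboardArtifactType("APP_CUSTOM_HIT", "Custom Hit");
+	//   assert customType.getCategory() == BlackboardArtifact.Category.DATA_ARTIFACT;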
+
+	/**
+	 * Add an artifact type with the given name and category. Will return an
+	 * artifact Type.
+	 *
+	 * @param artifactTypeName System (unique) name of artifact
+	 * @param displayName      Display (non-unique) name of artifact
+	 * @param category         Artifact type category.
+	 *
+	 * @return Type of the artifact added.
+	 *
+	 * @throws TskCoreException exception thrown if a critical error occurs
+	 * @throws TskDataException exception thrown if given data is already in db
+	 *                          within tsk core
+	 */
+	BlackboardArtifact.Type addBlackboardArtifactType(String artifactTypeName, String displayName, BlackboardArtifact.Category category) throws TskCoreException, TskDataException {
+		CaseDbConnection connection = null;
 		acquireSingleUserCaseWriteLock();
 		Statement s = null;
 		ResultSet rs = null;
 		try {
+			connection = connections.getConnection();
 			connection.beginTransaction();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT artifact_type_id FROM blackboard_artifact_types WHERE type_name = '" + artifactTypeName + "'"); //NON-NLS
@@ -4346,8 +4953,8 @@ public BlackboardArtifact.Type addBlackboardArtifactType(String artifactTypeName
 						maxID++;
 					}
 				}
-				connection.executeUpdate(s, "INSERT INTO blackboard_artifact_types (artifact_type_id, type_name, display_name) VALUES ('" + maxID + "', '" + artifactTypeName + "', '" + displayName + "')"); //NON-NLS
-				BlackboardArtifact.Type type = new BlackboardArtifact.Type(maxID, artifactTypeName, displayName);
+				connection.executeUpdate(s, "INSERT INTO blackboard_artifact_types (artifact_type_id, type_name, display_name, category_type) VALUES ('" + maxID + "', '" + artifactTypeName + "', '" + displayName + "', " + category.getID() + " )"); //NON-NLS
+				BlackboardArtifact.Type type = new BlackboardArtifact.Type(maxID, artifactTypeName, displayName, category);
 				this.typeIdToArtifactTypeMap.put(type.getTypeID(), type);
 				this.typeNameToArtifactTypeMap.put(type.getTypeName(), type);
 				connection.commitTransaction();
@@ -4356,22 +4963,24 @@ public BlackboardArtifact.Type addBlackboardArtifactType(String artifactTypeName
 				throw new TskDataException("The attribute type that was added was already within the system.");
 			}
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			rollbackTransaction(connection);
 			throw new TskCoreException("Error adding artifact type", ex);
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
 
 	public ArrayList<BlackboardAttribute> getBlackboardAttributes(final BlackboardArtifact artifact) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
+		Statement statement = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
-			Statement statement = connection.createStatement();
+			connection = connections.getConnection();	
+			statement = connection.createStatement();
 			rs = connection.executeQuery(statement, "SELECT attrs.artifact_id AS artifact_id, "
 					+ "attrs.source AS source, attrs.context AS context, attrs.attribute_type_id AS attribute_type_id, "
 					+ "attrs.value_type AS value_type, attrs.value_byte AS value_byte, "
@@ -4414,7 +5023,74 @@ public ArrayList<BlackboardAttribute> getBlackboardAttributes(final BlackboardAr
 			throw new TskCoreException("Error getting attributes for artifact, artifact id = " + artifact.getArtifactID(), ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeStatement(statement);
+			closeConnection(connection);
+			releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get the attributes associated with the given file.
+	 *
+	 * @param file The file to get attributes for.
+	 *
+	 * @return A list of attributes associated with the given file.
+	 *
+	 * @throws TskCoreException If an error occurs accessing the case database.
+	 */
+	ArrayList<Attribute> getFileAttributes(final AbstractFile file) throws TskCoreException {
+		CaseDbConnection connection = null;
+		Statement statement = null;
+		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
+		try {
+			connection = connections.getConnection();
+			statement = connection.createStatement();
+			rs = connection.executeQuery(statement, "SELECT attrs.id as id,  attrs.obj_id AS obj_id, "
+					+ "attrs.attribute_type_id AS attribute_type_id, "
+					+ "attrs.value_type AS value_type, attrs.value_byte AS value_byte, "
+					+ "attrs.value_text AS value_text, attrs.value_int32 AS value_int32, "
+					+ "attrs.value_int64 AS value_int64, attrs.value_double AS value_double, "
+					+ "types.type_name AS type_name, types.display_name AS display_name "
+					+ "FROM tsk_file_attributes AS attrs "
+					+ " INNER JOIN blackboard_attribute_types AS types "
+					+ " ON attrs.attribute_type_id = types.attribute_type_id "
+					+ " WHERE attrs.obj_id = " + file.getId());
+
+			ArrayList<Attribute> attributes = new ArrayList<>();
+			while (rs.next()) {
+				int attributeTypeId = rs.getInt("attribute_type_id");
+				String attributeTypeName = rs.getString("type_name");
+				BlackboardAttribute.Type attributeType;
+				if (this.typeIdToAttributeTypeMap.containsKey(attributeTypeId)) {
+					attributeType = this.typeIdToAttributeTypeMap.get(attributeTypeId);
+				} else {
+					attributeType = new BlackboardAttribute.Type(attributeTypeId, attributeTypeName,
+							rs.getString("display_name"),
+							BlackboardAttribute.TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.fromType(rs.getInt("value_type")));
+					this.typeIdToAttributeTypeMap.put(attributeTypeId, attributeType);
+					this.typeNameToAttributeTypeMap.put(attributeTypeName, attributeType);
+				}
+
+				final Attribute attr = new Attribute(
+						rs.getLong("id"),
+						rs.getLong("obj_id"),
+						attributeType,
+						rs.getInt("value_int32"),
+						rs.getLong("value_int64"),
+						rs.getDouble("value_double"),
+						rs.getString("value_text"),
+						rs.getBytes("value_byte"), this
+				);
+				attributes.add(attr);
+			}
+			return attributes;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting attributes for file, file id = " + file.getId(), ex);
+		} finally {
+			closeResultSet(rs);
+			closeStatement(statement);
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
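+	// Note on the method above: tsk_file_attributes rows are joined with
+	// blackboard_attribute_types, and each attribute type seen is cached in
+	// typeIdToAttributeTypeMap / typeNameToAttributeTypeMap so later lookups
+	// can avoid another query.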
@@ -4432,11 +5108,12 @@ public ArrayList<BlackboardAttribute> getBlackboardAttributes(final BlackboardAr
 	 *                          within tsk core \ref query_database_page
 	 */
 	public ArrayList<BlackboardAttribute> getMatchingAttributes(String whereClause) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT blackboard_attributes.artifact_id AS artifact_id, "
 					+ "blackboard_attributes.source AS source, blackboard_attributes.context AS context, "
@@ -4469,7 +5146,7 @@ public ArrayList<BlackboardAttribute> getMatchingAttributes(String whereClause)
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4486,11 +5163,12 @@ public ArrayList<BlackboardAttribute> getMatchingAttributes(String whereClause)
 	 *                          within tsk core \ref query_database_page
 	 */
 	public ArrayList<BlackboardArtifact> getMatchingArtifacts(String whereClause) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
-		ResultSet rs = null;
+		CaseDbConnection connection = null;
 		Statement s = null;
+		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT blackboard_artifacts.artifact_id AS artifact_id, "
 					+ "blackboard_artifacts.obj_id AS obj_id, blackboard_artifacts.artifact_obj_id AS artifact_obj_id, blackboard_artifacts.data_source_obj_id AS data_source_obj_id, blackboard_artifacts.artifact_type_id AS artifact_type_id, "
@@ -4501,7 +5179,8 @@ public ArrayList<BlackboardArtifact> getMatchingArtifacts(String whereClause) th
 				BlackboardArtifact.Type type;
 				// artifact type is cached, so this does not necessarily call to the db
 				type = this.getArtifactType(rs.getInt("artifact_type_id"));
-				BlackboardArtifact artifact = new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"), rs.getLong("data_source_obj_id"),
+				BlackboardArtifact artifact = new BlackboardArtifact(this, rs.getLong("artifact_id"), rs.getLong("obj_id"), rs.getLong("artifact_obj_id"),
+						rs.getObject("data_source_obj_id") != null ? rs.getLong("data_source_obj_id") : null,
 						type.getTypeID(), type.getTypeName(), type.getDisplayName(),
 						BlackboardArtifact.ReviewStatus.withID(rs.getInt("review_status_id")));
 				matches.add(artifact);
@@ -4512,7 +5191,7 @@ public ArrayList<BlackboardArtifact> getMatchingArtifacts(String whereClause) th
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4529,10 +5208,36 @@ public ArrayList<BlackboardArtifact> getMatchingArtifacts(String whereClause) th
 	 *
 	 * @throws TskCoreException exception thrown if a critical error occurs
 	 *                          within tsk core
+	 * @deprecated Please use newDataArtifact or newAnalysisResult.
 	 */
+	@Deprecated
 	public BlackboardArtifact newBlackboardArtifact(int artifactTypeID, long obj_id) throws TskCoreException {
 		BlackboardArtifact.Type type = getArtifactType(artifactTypeID);
-		return newBlackboardArtifact(artifactTypeID, obj_id, type.getTypeName(), type.getDisplayName());
+		if (type == null) {
+			throw new TskCoreException("Unknown artifact type for id: " + artifactTypeID);
+		}
+		
+		Category category = type.getCategory();
+		if (category == null) {
+			throw new TskCoreException(String.format("No category for %s (id: %d)", 
+					type.getDisplayName() == null ? "<null>" : type.getDisplayName(),
+					type.getTypeID()));
+		}
+		
+		Content content = getContentById(obj_id);
+		if (content == null) {
+			throw new TskCoreException("No content found for object id: " + obj_id);
+		}
+		
+		switch (category) {
+			case ANALYSIS_RESULT: 
+				return content.newAnalysisResult(type, Score.SCORE_UNKNOWN, null, null, null, Collections.emptyList())
+						.getAnalysisResult();
+			case DATA_ARTIFACT:
+				return content.newDataArtifact(type, Collections.emptyList());
+			default:
+				throw new TskCoreException("Unknown category type: " + category.getName());
+		}
 	}
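+	// Hedged migration sketch: the deprecated overloads above now route on the
+	// category of the artifact type. New code is expected to call the Content
+	// factories directly, e.g. (the names `file` and `type` are illustrative):
+	//
+	//   BlackboardArtifact dataArtifact = file.newDataArtifact(type, Collections.emptyList());
+	//   BlackboardArtifact result = file.newAnalysisResult(type, Score.SCORE_UNKNOWN,
+	//           null, null, null, Collections.emptyList()).getAnalysisResult();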
 
 	/**
@@ -4545,59 +5250,77 @@ public BlackboardArtifact newBlackboardArtifact(int artifactTypeID, long obj_id)
 	 *
 	 * @throws TskCoreException exception thrown if a critical error occurs
 	 *                          within tsk core
+	 * @deprecated Please use newDataArtifact or newAnalysisResult.
 	 */
+	@Deprecated
+	@SuppressWarnings("deprecation")
 	public BlackboardArtifact newBlackboardArtifact(ARTIFACT_TYPE artifactType, long obj_id) throws TskCoreException {
-		return newBlackboardArtifact(artifactType.getTypeID(), obj_id, artifactType.getLabel(), artifactType.getDisplayName());
+		return newBlackboardArtifact(artifactType.getTypeID(), obj_id);
 	}
-	
+
 	/**
 	 * Add a new blackboard artifact with the given type.
 	 *
-	 * @param artifactType the type the given artifact should have
-	 * @param obj_id       the content object id associated with this artifact
-	 * @param data_source_obj_id The data source obj id associated with this artifact
+	 * @param artifactTypeID     the id of the type the given artifact should have
+	 * @param obj_id             the content object id associated with this
+	 *                           artifact
+	 * @param data_source_obj_id The data source obj id associated with this
+	 *                           artifact
 	 *
 	 * @return a new blackboard artifact
 	 *
 	 * @throws TskCoreException exception thrown if a critical error occurs
 	 *                          within tsk core
+	 * @deprecated Please use newDataArtifact or newAnalysisResult.
 	 */
+	@Deprecated
+	@SuppressWarnings("deprecation")
 	BlackboardArtifact newBlackboardArtifact(int artifactTypeID, long obj_id, long data_source_obj_id) throws TskCoreException {
 		BlackboardArtifact.Type type = getArtifactType(artifactTypeID);
-		return newBlackboardArtifact(artifactTypeID, obj_id, type.getTypeName(), type.getDisplayName(), data_source_obj_id);
+		try (CaseDbConnection connection = connections.getConnection()) {
+			return newBlackboardArtifact(artifactTypeID, obj_id, type.getTypeName(), type.getDisplayName(), data_source_obj_id, connection);
+		}
 	}
 
+	@Deprecated
 	private BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_id, String artifactTypeName, String artifactDisplayName) throws TskCoreException {
 		try (CaseDbConnection connection = connections.getConnection()) {
 			long data_source_obj_id = getDataSourceObjectId(connection, obj_id);
-			return this.newBlackboardArtifact(artifact_type_id, obj_id, artifactTypeName, artifactDisplayName, data_source_obj_id);
+			return this.newBlackboardArtifact(artifact_type_id, obj_id, artifactTypeName, artifactDisplayName, data_source_obj_id, connection);
+		}
+	}
+
+	PreparedStatement createInsertArtifactStatement(int artifact_type_id, long obj_id, long artifact_obj_id, long data_source_obj_id, CaseDbConnection connection) throws TskCoreException, SQLException {
+
+		PreparedStatement statement;
+		if (dbType == DbType.POSTGRESQL) {
+			statement = connection.getPreparedStatement(PREPARED_STATEMENT.POSTGRESQL_INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS);
+			statement.clearParameters();
+			statement.setLong(1, obj_id);
+			statement.setLong(2, artifact_obj_id);
+			statement.setLong(3, data_source_obj_id);
+			statement.setInt(4, artifact_type_id);
+		} else {
+			statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS);
+			statement.clearParameters();
+			this.nextArtifactId++;
+			statement.setLong(1, this.nextArtifactId);
+			statement.setLong(2, obj_id);
+			statement.setLong(3, artifact_obj_id);
+			statement.setLong(4, data_source_obj_id);
+			statement.setInt(5, artifact_type_id);
 		}
+
+		return statement;
 	}
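+	// Note on the helper above: with PostgreSQL the artifact_id key is generated
+	// by the database, so only obj_id, artifact_obj_id, data_source_obj_id and the
+	// type id are bound; with SQLite this layer increments nextArtifactId itself
+	// and binds it as the first parameter.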
 
-	private BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_id, String artifactTypeName, String artifactDisplayName, long data_source_obj_id) throws TskCoreException {
+	@Deprecated
+	BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_id, String artifactTypeName, String artifactDisplayName, long data_source_obj_id, CaseDbConnection connection) throws TskCoreException {
 		acquireSingleUserCaseWriteLock();
-		try (CaseDbConnection connection = connections.getConnection()) {
+		try {
 			long artifact_obj_id = addObject(obj_id, TskData.ObjectType.ARTIFACT.getObjectType(), connection);
-			PreparedStatement statement = null;
-			if (dbType == DbType.POSTGRESQL) {
-				statement = connection.getPreparedStatement(PREPARED_STATEMENT.POSTGRESQL_INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS);
-				statement.clearParameters();
-				statement.setLong(1, obj_id);
-				statement.setLong(2, artifact_obj_id);
-				statement.setLong(3, data_source_obj_id);
-				statement.setInt(4, artifact_type_id);
-
-			} else {
-				statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_ARTIFACT, Statement.RETURN_GENERATED_KEYS);
-				statement.clearParameters();
-				this.nextArtifactId++;
-				statement.setLong(1, this.nextArtifactId);
-				statement.setLong(2, obj_id);
-				statement.setLong(3, artifact_obj_id);
-				statement.setLong(4, data_source_obj_id);
-				statement.setInt(5, artifact_type_id);
+			PreparedStatement statement = createInsertArtifactStatement(artifact_type_id, obj_id, artifact_obj_id, data_source_obj_id, connection);
 
-			}
 			connection.executeUpdate(statement);
 			try (ResultSet resultSet = statement.getGeneratedKeys()) {
 				resultSet.next();
@@ -4611,6 +5334,84 @@ private BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_
 		}
 	}
 
+	/**
+	 * Creates a new analysis result by inserting a row in the artifacts table
+	 * and a corresponding row in the tsk_analysis_results table.
+	 *
+	 * @param artifactType    Analysis result artifact type.
+	 * @param objId           Object id of parent.
+	 * @param dataSourceObjId Data source object id, may be null.
+	 * @param score           Score.
+	 * @param conclusion      Conclusion, may be null or an empty string.
+	 * @param configuration   Configuration used by analysis, may be null or an
+	 *                        empty string.
+	 * @param justification   Justification, may be null or an empty string.
+	 * @param connection      Database connection to use.
+	 *
+	 * @return Analysis result.
+	 *
+	 * @throws TskCoreException
+	 */
+	AnalysisResult newAnalysisResult(BlackboardArtifact.Type artifactType, long objId, Long dataSourceObjId, Score score, String conclusion, String configuration, String justification, CaseDbConnection connection) throws TskCoreException {
+
+		if (artifactType.getCategory() != BlackboardArtifact.Category.ANALYSIS_RESULT) {
+			throw new TskCoreException(String.format("Artifact type (name = %s) is not of the ANALYSIS_RESULT category.", artifactType.getTypeName()));
+		}
+
+		long artifactID;
+		acquireSingleUserCaseWriteLock();
+		try {
+			// add a row in tsk_objects
+			long artifactObjId = addObject(objId, TskData.ObjectType.ARTIFACT.getObjectType(), connection);
+
+			// add a row in blackboard_artifacts table
+			PreparedStatement insertArtifactStatement;
+			ResultSet resultSet = null;
+			try {
+				insertArtifactStatement = createInsertArtifactStatement(artifactType.getTypeID(), objId, artifactObjId, dataSourceObjId, connection);
+				connection.executeUpdate(insertArtifactStatement);
+				resultSet = insertArtifactStatement.getGeneratedKeys();
+				resultSet.next();
+				artifactID = resultSet.getLong(1); //last_insert_rowid()
+
+				// add a row in tsk_analysis_results if any data for it is set
+				if (score.getSignificance() != Score.Significance.UNKNOWN
+						|| !StringUtils.isBlank(conclusion)
+						|| !StringUtils.isBlank(configuration)
+						|| !StringUtils.isBlank(justification)) {
+
+					PreparedStatement analysisResultsStatement;
+
+					analysisResultsStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_ANALYSIS_RESULT);
+					analysisResultsStatement.clearParameters();
+
+					analysisResultsStatement.setLong(1, artifactObjId);
+					analysisResultsStatement.setString(2, (conclusion != null) ? conclusion : "");
+					analysisResultsStatement.setInt(3, score.getSignificance().getId());
+					analysisResultsStatement.setInt(4, score.getPriority().getId());
+					analysisResultsStatement.setString(5, (configuration != null) ? configuration : "");
+					analysisResultsStatement.setString(6, (justification != null) ? justification : "");
+
+					connection.executeUpdate(analysisResultsStatement);
+				}
+
+				return new AnalysisResult(this, artifactID, objId, artifactObjId, dataSourceObjId, artifactType.getTypeID(),
+						artifactType.getTypeName(), artifactType.getDisplayName(),
+						BlackboardArtifact.ReviewStatus.UNDECIDED, true,
+						score, (conclusion != null) ? conclusion : "",
+						(configuration != null) ? configuration : "", (justification != null) ? justification : "");
+
+			} finally {
+				closeResultSet(resultSet);
+			}
+
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error creating a analysis result", ex);
+		} finally {
+			releaseSingleUserCaseWriteLock();
+		}
+	}
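+	// A minimal call sketch (the type and id variable names are illustrative): a
+	// row is added to tsk_analysis_results only when the score's significance is
+	// known or one of conclusion/configuration/justification is non-blank;
+	// otherwise only the blackboard_artifacts row is created.
+	//
+	//   AnalysisResult result = newAnalysisResult(encryptionDetectedType, fileObjId,
+	//           dataSourceObjId, Score.SCORE_UNKNOWN, "Encrypted volume detected",
+	//           null, null, connection);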
+
 	/**
 	 * Checks if the content object has children. Note: this is generally more
 	 * efficient then preloading all children and checking if the set is empty,
@@ -4624,10 +5425,12 @@ private BlackboardArtifact newBlackboardArtifact(int artifact_type_id, long obj_
 	 *                          within tsk core
 	 */
 	boolean getContentHasChildren(Content content) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT COUNT(obj_id) AS count FROM tsk_objects WHERE par_obj_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_CHILD_OBJECTS_BY_PARENT);
 			statement.clearParameters();
@@ -4642,7 +5445,7 @@ boolean getContentHasChildren(Content content) throws TskCoreException {
 			throw new TskCoreException("Error checking for children of parent " + content, e);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4665,10 +5468,12 @@ int getContentChildrenCount(Content content) throws TskCoreException {
 			return 0;
 		}
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT COUNT(obj_id) AS count FROM tsk_objects WHERE par_obj_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_CHILD_OBJECTS_BY_PARENT);
 			statement.clearParameters();
@@ -4683,7 +5488,7 @@ int getContentChildrenCount(Content content) throws TskCoreException {
 			throw new TskCoreException("Error checking for children of parent " + content, e);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4700,10 +5505,12 @@ int getContentChildrenCount(Content content) throws TskCoreException {
 	 *                          within tsk core
 	 */
 	List<Content> getAbstractFileChildren(Content parent, TSK_DB_FILES_TYPE_ENUM type) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_PARENT_AND_TYPE);
 			statement.clearParameters();
 			long parentId = parent.getId();
@@ -4715,11 +5522,11 @@ List<Content> getAbstractFileChildren(Content parent, TSK_DB_FILES_TYPE_ENUM typ
 			throw new TskCoreException("Error getting AbstractFile children for Content", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
-
+	
 	/**
 	 * Returns the list of all AbstractFile Children for a given
 	 * AbstractFileParent
@@ -4730,10 +5537,12 @@ List<Content> getAbstractFileChildren(Content parent, TSK_DB_FILES_TYPE_ENUM typ
 	 *                          within tsk core
 	 */
 	List<Content> getAbstractFileChildren(Content parent) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_PARENT);
 			statement.clearParameters();
 			long parentId = parent.getId();
@@ -4744,7 +5553,7 @@ List<Content> getAbstractFileChildren(Content parent) throws TskCoreException {
 			throw new TskCoreException("Error getting AbstractFile children for Content", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4761,10 +5570,12 @@ List<Content> getAbstractFileChildren(Content parent) throws TskCoreException {
 	 * @throws TskCoreException
 	 */
 	List<Long> getAbstractFileChildrenIds(Content parent, TSK_DB_FILES_TYPE_ENUM type) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILE_IDS_BY_PARENT_AND_TYPE);
 			statement.clearParameters();
 			statement.setLong(1, parent.getId());
@@ -4779,7 +5590,7 @@ List<Long> getAbstractFileChildrenIds(Content parent, TSK_DB_FILES_TYPE_ENUM typ
 			throw new TskCoreException("Error getting AbstractFile children for Content", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4794,10 +5605,12 @@ List<Long> getAbstractFileChildrenIds(Content parent, TSK_DB_FILES_TYPE_ENUM typ
 	 * @throws TskCoreException
 	 */
 	List<Long> getAbstractFileChildrenIds(Content parent) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILE_IDS_BY_PARENT);
 			statement.clearParameters();
 			statement.setLong(1, parent.getId());
@@ -4811,7 +5624,7 @@ List<Long> getAbstractFileChildrenIds(Content parent) throws TskCoreException {
 			throw new TskCoreException("Error getting AbstractFile children for Content", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4827,10 +5640,12 @@ List<Long> getAbstractFileChildrenIds(Content parent) throws TskCoreException {
 	 * @throws TskCoreException
 	 */
 	List<Long> getBlackboardArtifactChildrenIds(Content parent) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_OBJECTIDS_BY_PARENT);
 			statement.clearParameters();
 			statement.setLong(1, parent.getId());
@@ -4844,7 +5659,7 @@ List<Long> getBlackboardArtifactChildrenIds(Content parent) throws TskCoreExcept
 			throw new TskCoreException("Error getting children for BlackboardArtifact", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4877,11 +5692,12 @@ List<Content> getBlackboardArtifactChildren(Content parent) throws TskCoreExcept
 	 *                          within tsk core
 	 */
 	Collection<ObjectInfo> getChildrenInfo(Content c) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT tsk_objects.obj_id AS obj_id, tsk_objects.type AS type " //NON-NLS
 					+ "FROM tsk_objects LEFT JOIN tsk_files " //NON-NLS
@@ -4898,7 +5714,7 @@ Collection<ObjectInfo> getChildrenInfo(Content c) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -4928,11 +5744,12 @@ ObjectInfo getParentInfo(Content c) throws TskCoreException {
 	 *                          within tsk core
 	 */
 	ObjectInfo getParentInfo(long contentId) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT parent.obj_id AS obj_id, parent.type AS type " //NON-NLS
 					+ "FROM tsk_objects AS parent INNER JOIN tsk_objects AS child " //NON-NLS
@@ -4948,7 +5765,7 @@ ObjectInfo getParentInfo(long contentId) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -5000,14 +5817,15 @@ public Content getContentById(long id) throws TskCoreException {
 			return content;
 		}
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
-		Statement s = null;
-		ResultSet rs = null;
 		long parentId;
 		TskData.ObjectType type;
-
+		
+		CaseDbConnection connection = null;
+		Statement s = null;
+		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_objects WHERE obj_id = " + id + " LIMIT  1"); //NON-NLS
 			if (!rs.next()) {
@@ -5020,7 +5838,7 @@ public Content getContentById(long id) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 
@@ -5062,8 +5880,14 @@ public Content getContentById(long id) throws TskCoreException {
 			case REPORT:
 				content = getReportById(id);
 				break;
+			case OS_ACCOUNT:
+				content = this.osAccountManager.getOsAccountByObjectId(id);
+				break;
+			case HOST_ADDRESS:
+				content = hostAddressManager.getHostAddress(id);
+				break;
 			default:
-				throw new TskCoreException("Could not obtain Content object with ID: " + id);
+				content = new UnsupportedContent(this, id);
 		}
 
 		return content;
@@ -5077,17 +5901,14 @@ public Content getContentById(long id) throws TskCoreException {
 	 * @return file path or null
 	 */
 	String getFilePath(long id) {
-		CaseDbConnection connection;
-		try {
-			connection = connections.getConnection();
-		} catch (TskCoreException ex) {
-			logger.log(Level.SEVERE, "Error getting file path for file " + id, ex); //NON-NLS
-			return null;
-		}
+
 		String filePath = null;
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_LOCAL_PATH_FOR_FILE);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -5095,11 +5916,11 @@ String getFilePath(long id) {
 			if (rs.next()) {
 				filePath = rs.getString("path");
 			}
-		} catch (SQLException ex) {
+		} catch (SQLException | TskCoreException ex) {
 			logger.log(Level.SEVERE, "Error getting file path for file " + id, ex); //NON-NLS
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return filePath;
@@ -5113,17 +5934,13 @@ String getFilePath(long id) {
 	 * @return Encoding type (NONE if nothing was found)
 	 */
 	TskData.EncodingType getEncodingType(long id) {
-		CaseDbConnection connection;
-		try {
-			connection = connections.getConnection();
-		} catch (TskCoreException ex) {
-			logger.log(Level.SEVERE, "Error getting file path for file " + id, ex); //NON-NLS
-			return null;
-		}
+
 		TskData.EncodingType type = TskData.EncodingType.NONE;
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ENCODING_FOR_FILE);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -5131,11 +5948,11 @@ TskData.EncodingType getEncodingType(long id) {
 			if (rs.next()) {
 				type = TskData.EncodingType.valueOf(rs.getInt(1));
 			}
-		} catch (SQLException ex) {
+		} catch (SQLException | TskCoreException ex) {
 			logger.log(Level.SEVERE, "Error getting encoding type for file " + id, ex); //NON-NLS
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return type;
@@ -5210,12 +6027,15 @@ String getFileName(long objectId, CaseDbConnection connection) {
 	 *                          method could not be queried
 	 */
 	DerivedFile.DerivedMethod getDerivedMethod(long id) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
+		
 		DerivedFile.DerivedMethod method = null;
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs1 = null;
 		ResultSet rs2 = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_DERIVED_FILE);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -5239,7 +6059,7 @@ DerivedFile.DerivedMethod getDerivedMethod(long id) throws TskCoreException {
 		} finally {
 			closeResultSet(rs2);
 			closeResultSet(rs1);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return method;
@@ -5260,7 +6080,7 @@ public AbstractFile getAbstractFileById(long id) throws TskCoreException {
 		try {
 			return getAbstractFileById(id, connection);
 		} finally {
-			connection.close();
+			closeConnection(connection);
 		}
 	}
 
@@ -5302,6 +6122,7 @@ AbstractFile getAbstractFileById(long objectId, CaseDbConnection connection) thr
 	 * Get artifact from blackboard_artifacts table by its artifact_obj_id
 	 *
 	 * @param id id of the artifact in blackboard_artifacts table
+	 *           (artifact_obj_id column)
 	 *
 	 * @return Artifact object populated, or null if not found.
 	 *
@@ -5309,25 +6130,39 @@ AbstractFile getAbstractFileById(long objectId, CaseDbConnection connection) thr
 	 *                          core and file could not be queried
 	 */
 	public BlackboardArtifact getArtifactById(long id) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
-			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_BY_ARTIFACT_OBJ_ID);
+			connection = connections.getConnection();
+			
+			// get the artifact type.
+			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_TYPE_BY_ARTIFACT_OBJ_ID);
 			statement.clearParameters();
 			statement.setLong(1, id);
+
 			rs = connection.executeQuery(statement);
-			List<BlackboardArtifact> artifacts = resultSetToArtifacts(rs);
-			if (artifacts.size() > 0) {
-				return artifacts.get(0);
-			} else {
-				return null;
+			if (!rs.next()) {
+				throw new TskCoreException("Error getting artifacttype for artifact with artifact_obj_id = " + id);
+			}
+
+			// based on the artifact type category, get the analysis result or the data artifact
+			BlackboardArtifact.Type artifactType = getArtifactType(rs.getInt("artifact_type_id"));
+			switch (artifactType.getCategory()) {
+				case ANALYSIS_RESULT:
+					return blackboard.getAnalysisResultById(id);
+				case DATA_ARTIFACT:
+					return blackboard.getDataArtifactById(id);
+				default:
+					throw new TskCoreException(String.format("Unknown artifact category for artifact with artifact_obj_id = %d, and artifact type = %s", id, artifactType.getTypeName()));
 			}
+
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting artifacts by artifact_obj_id, artifact_obj_id = " + id, ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
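+	// Clarifying note: the method above looks artifacts up by artifact_obj_id (the
+	// tsk_objects id of the artifact) and dispatches to the Blackboard based on the
+	// type's category, whereas getArtifactByArtifactId() below queries by the
+	// artifact_id column of blackboard_artifacts.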
@@ -5343,10 +6178,12 @@ public BlackboardArtifact getArtifactById(long id) throws TskCoreException {
 	 *                          core and file could not be queried
 	 */
 	public BlackboardArtifact getArtifactByArtifactId(long id) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ARTIFACT_BY_ARTIFACT_ID);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -5361,7 +6198,7 @@ public BlackboardArtifact getArtifactByArtifactId(long id) throws TskCoreExcepti
 			throw new TskCoreException("Error getting artifacts by artifact id, artifact id = " + id, ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -5415,11 +6252,12 @@ private long getFileSystemId(long fileId, CaseDbConnection connection) {
 	 */
 	public boolean isFileFromSource(Content dataSource, long fileId) throws TskCoreException {
 		String query = String.format("SELECT COUNT(*) AS count FROM tsk_files WHERE obj_id = %d AND data_source_obj_id = %d", fileId, dataSource.getId()); //NON-NLS
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement statement = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			statement = connection.createStatement();
 			resultSet = connection.executeQuery(statement, query);
 			resultSet.next();
@@ -5429,10 +6267,26 @@ public boolean isFileFromSource(Content dataSource, long fileId) throws TskCoreE
 		} finally {
 			closeResultSet(resultSet);
 			closeStatement(statement);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
+	
+	/**
+	 * Returns true if the string contains a SQL LIKE wildcard ('%' or '_'), as
+	 * defined by https://www.postgresql.org/docs/9.5/functions-matching.html and
+	 * https://sqlite.org/lang_expr.html#the_like_glob_regexp_and_match_operators.
+	 *
+	 * @param str The string to check.
+	 *
+	 * @return True if it contains a LIKE wildcard.
+	 */
+	private static boolean containsLikeWildcard(String str) {
+		if (str == null) {
+			return false;
+		} else {
+			return str.contains("%") || str.contains("_");
+		}
+	}
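+	// Examples: containsLikeWildcard("report.pdf") is false, while
+	// containsLikeWildcard("%.pdf") and containsLikeWildcard("my_file.txt") are
+	// both true, since '_' is also a single-character LIKE wildcard. findFiles()
+	// below only uses the extension-indexed lookup when this returns false.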
 
 	/**
 	 * @param dataSource the dataSource (Image, parent-less VirtualDirectory) to
@@ -5446,22 +6300,39 @@ public boolean isFileFromSource(Content dataSource, long fileId) throws TskCoreE
 	 * @throws TskCoreException thrown if check failed
 	 */
 	public List<AbstractFile> findFiles(Content dataSource, String fileName) throws TskCoreException {
-		List<AbstractFile> files = new ArrayList<AbstractFile>();
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		String ext = "";
+		if (!containsLikeWildcard(fileName)) {
+			ext = SleuthkitCase.extractExtension(fileName);	
+		}
+		
+		List<AbstractFile> files = new ArrayList<>();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
-			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_DATA_SOURCE_AND_NAME);
-			statement.clearParameters();
-			statement.setString(1, fileName.toLowerCase());
-			statement.setLong(2, dataSource.getId());
+			connection = connections.getConnection();
+			
+			PreparedStatement statement;
+			if (ext.isEmpty()) {
+				statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_DATA_SOURCE_AND_NAME);
+				statement.clearParameters();
+				statement.setString(1, fileName.toLowerCase());
+				statement.setLong(2, dataSource.getId());
+			} else {
+				statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_EXTENSION_AND_DATA_SOURCE_AND_NAME);
+				statement.clearParameters();
+				statement.setString(1, ext);
+				statement.setString(2, fileName.toLowerCase());
+				statement.setLong(3, dataSource.getId());
+			}
+
 			resultSet = connection.executeQuery(statement);
 			files.addAll(resultSetToAbstractFiles(resultSet, connection));
 		} catch (SQLException e) {
 			throw new TskCoreException(bundle.getString("SleuthkitCase.findFiles.exception.msg3.text"), e);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return files;
@@ -5481,23 +6352,40 @@ public List<AbstractFile> findFiles(Content dataSource, String fileName) throws
 	 * @throws org.sleuthkit.datamodel.TskCoreException
 	 */
 	public List<AbstractFile> findFiles(Content dataSource, String fileName, String dirSubString) throws TskCoreException {
-		List<AbstractFile> files = new ArrayList<AbstractFile>();
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		String ext = "";
+		if (!containsLikeWildcard(fileName)) {
+			ext = SleuthkitCase.extractExtension(fileName);	
+		}
+		
+		List<AbstractFile> files = new ArrayList<>();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
-			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_DATA_SOURCE_AND_PARENT_PATH_AND_NAME);
-			statement.clearParameters();
-			statement.setString(1, fileName.toLowerCase());
-			statement.setString(2, "%" + dirSubString.toLowerCase() + "%"); //NON-NLS
-			statement.setLong(3, dataSource.getId());
+			connection = connections.getConnection();
+			PreparedStatement statement;
+			if (ext.isEmpty()) {
+				statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_DATA_SOURCE_AND_PARENT_PATH_AND_NAME);
+				statement.clearParameters();
+				statement.setString(1, fileName.toLowerCase());
+				statement.setString(2, "%" + dirSubString.toLowerCase() + "%"); //NON-NLS
+				statement.setLong(3, dataSource.getId());
+			} else {
+				statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_EXTENSION_AND_DATA_SOURCE_AND_PARENT_PATH_AND_NAME);
+				statement.clearParameters();
+				statement.setString(1, ext);
+				statement.setString(2, fileName.toLowerCase());
+				statement.setString(3, "%" + dirSubString.toLowerCase() + "%"); //NON-NLS
+				statement.setLong(4, dataSource.getId());
+			}
+			
 			resultSet = connection.executeQuery(statement);
 			files.addAll(resultSetToAbstractFiles(resultSet, connection));
 		} catch (SQLException e) {
 			throw new TskCoreException(bundle.getString("SleuthkitCase.findFiles3.exception.msg3.text"), e);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return files;
@@ -5619,8 +6507,8 @@ public VirtualDirectory addVirtualDirectory(long parentId, String directoryName,
 
 			// Insert a row for the virtual directory into the tsk_files table.
 			// INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type,
-			// dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, known, mime_type, parent_path, data_source_obj_id,extension)
-			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?)
+			// dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, known, mime_type, parent_path, data_source_obj_id, extension, owner_uid, os_account_obj_id)
+			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 			statement.clearParameters();
 			statement.setLong(1, newObjId);
@@ -5685,6 +6573,10 @@ public VirtualDirectory addVirtualDirectory(long parentId, String directoryName,
 
 			//extension, since this is not really file we just set it to null
 			statement.setString(21, null);
+
+			statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
+			statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
+
 			connection.executeUpdate(statement);
 
 			return new VirtualDirectory(this, newObjId, dataSourceObjectId, directoryName, dirType,
@@ -5764,8 +6656,8 @@ public LocalDirectory addLocalDirectory(long parentId, String directoryName, Cas
 
 			// Insert a row for the local directory into the tsk_files table.
 			// INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type,
-			// dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, known, mime_type, parent_path, data_source_obj_id)
-			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+			// dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, known, mime_type, parent_path, data_source_obj_id, extension, owner_uid, os_account_obj_id)
+			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 			statement.clearParameters();
 			statement.setLong(1, newObjId);
@@ -5817,6 +6709,9 @@ public LocalDirectory addLocalDirectory(long parentId, String directoryName, Cas
 			//extension, since this is a directory we just set it to null
 			statement.setString(21, null);
 
+			statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
+			statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
+
 			connection.executeUpdate(statement);
 
 			return new LocalDirectory(this, newObjId, dataSourceObjectId, directoryName, dirType,
@@ -5849,27 +6744,57 @@ public LocalDirectory addLocalDirectory(long parentId, String directoryName, Cas
 	 * @throws TskCoreException if there is an error adding the data source.
 	 */
 	public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String rootDirectoryName, String timeZone, CaseDbTransaction transaction) throws TskCoreException {
-		acquireSingleUserCaseWriteLock();
+		return addLocalFilesDataSource(deviceId, rootDirectoryName, timeZone, null, transaction);
+	}
+
+	/**
+	 * Adds a local/logical files and/or directories data source.
+	 *
+	 * @param deviceId          An ASCII-printable identifier for the device
+	 *                          associated with the data source that is intended
+	 *                          to be unique across multiple cases (e.g., a
+	 *                          UUID).
+	 * @param rootDirectoryName The name for the root virtual directory for the
+	 *                          data source.
+	 * @param timeZone          The time zone used to process the data source,
+	 *                          may be the empty string.
+	 * @param host              The host for the data source (may be null)
+	 * @param transaction       A transaction in the scope of which the
+	 *                          operation is to be performed, managed by the
+	 *                          caller.
+	 *
+	 * @return The new local files data source.
+	 *
+	 * @throws TskCoreException if there is an error adding the data source.
+	 */
+	public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String rootDirectoryName, String timeZone, Host host, CaseDbTransaction transaction) throws TskCoreException {
+
 		Statement statement = null;
 		try {
+			CaseDbConnection connection = transaction.getConnection();
+
 			// Insert a row for the root virtual directory of the data source
 			// into the tsk_objects table.
-			CaseDbConnection connection = transaction.getConnection();
 			long newObjId = addObject(0, TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);
 
+			// If no host was supplied, make one
+			if (host == null) {
+				host = getHostManager().newHost("LogicalFileSet_" + newObjId + " Host", transaction);
+			}
+
 			// Insert a row for the virtual directory of the data source into
 			// the data_source_info table.
 			statement = connection.createStatement();
-			statement.executeUpdate("INSERT INTO data_source_info (obj_id, device_id, time_zone) "
-					+ "VALUES(" + newObjId + ", '" + deviceId + "', '" + timeZone + "');");
+			statement.executeUpdate("INSERT INTO data_source_info (obj_id, device_id, time_zone, host_id) "
+					+ "VALUES(" + newObjId + ", '" + deviceId + "', '" + timeZone + "', " + host.getHostId() + ");");
 
 			// Insert a row for the root virtual directory of the data source
 			// into the tsk_files table. Note that its data source object id is
 			// its own object id.
 			// INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path,
 			// dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime,
-			// atime, mtime, md5, known, mime_type, parent_path, data_source_obj_id, extension)
-			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?)
+			// atime, mtime, md5, sha256, known, mime_type, parent_path, data_source_obj_id, extension, owner_uid, os_account_obj_id)
+			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 			PreparedStatement preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 			preparedStatement.clearParameters();
 			preparedStatement.setLong(1, newObjId);
@@ -5899,6 +6824,8 @@ public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String root
 			preparedStatement.setString(19, parentPath);
 			preparedStatement.setLong(20, newObjId);
 			preparedStatement.setString(21, null); //extension, just set it to null
+			preparedStatement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
+			preparedStatement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
 			connection.executeUpdate(preparedStatement);
 
 			return new LocalFilesDataSource(this, newObjId, newObjId, deviceId, rootDirectoryName, dirType, metaType, dirFlag, metaFlags, timeZone, null, null, FileKnown.UNKNOWN, parentPath);
@@ -5907,7 +6834,6 @@ public LocalFilesDataSource addLocalFilesDataSource(String deviceId, String root
 			throw new TskCoreException(String.format("Error creating local files data source with device id %s and directory name %s", deviceId, rootDirectoryName), ex);
 		} finally {
 			closeStatement(statement);
-			releaseSingleUserCaseWriteLock();
 		}
 	}
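For illustration, a minimal sketch of how a caller might use the new host-aware addLocalFilesDataSource() overload, assuming an open SleuthkitCase named caseDb; the host name, device id, and root directory name are hypothetical:

import org.sleuthkit.datamodel.*;

class AddLocalFilesDataSourceSketch {
	static LocalFilesDataSource addWithHost(SleuthkitCase caseDb) throws TskCoreException {
		SleuthkitCase.CaseDbTransaction trans = caseDb.beginTransaction();
		try {
			// The same HostManager call the patch itself uses when no host is supplied.
			Host host = caseDb.getHostManager().newHost("Examiner Laptop Host", trans);
			LocalFilesDataSource ds = caseDb.addLocalFilesDataSource(
					"11111111-2222-3333-4444-555555555555", // device id, e.g. a UUID
					"LogicalFileSet1",                      // root virtual directory name
					"",                                     // time zone may be the empty string
					host, trans);
			trans.commit();
			return ds;
		} catch (TskCoreException ex) {
			trans.rollback();
			throw ex;
		}
	}
}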
 
@@ -5934,7 +6860,33 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 			String timezone, String md5, String sha1, String sha256,
 			String deviceId,
 			CaseDbTransaction transaction) throws TskCoreException {
-		acquireSingleUserCaseWriteLock();
+		return addImage(type, sectorSize, size, displayName, imagePaths, timezone, md5, sha1, sha256, deviceId, null, transaction);
+	}
+
+	/**
+	 * Add an image to the database.
+	 *
+	 * @param type        Type of image
+	 * @param sectorSize  Sector size
+	 * @param size        Image size
+	 * @param displayName Display name for the image
+	 * @param imagePaths  Image path(s)
+	 * @param timezone    Time zone
+	 * @param md5         MD5 hash
+	 * @param sha1        SHA1 hash
+	 * @param sha256      SHA256 hash
+	 * @param deviceId    Device ID
+	 * @param host        Host
+	 * @param transaction Case DB transaction
+	 *
+	 * @return the newly added Image
+	 *
+	 * @throws TskCoreException if there is an error adding the image to the
+	 *                          case database.
+	 */
+	public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size, String displayName, List<String> imagePaths,
+			String timezone, String md5, String sha1, String sha256,
+			String deviceId, Host host,
+			CaseDbTransaction transaction) throws TskCoreException {
 		Statement statement = null;
 		try {
 			// Insert a row for the Image into the tsk_objects table.
@@ -5968,15 +6920,7 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 				connection.executeUpdate(preparedStatement);
 			}
 
-			// Add a row to data_source_info
-			preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_DATA_SOURCE_INFO);
-			statement = connection.createStatement();
-			preparedStatement.setLong(1, newObjId);
-			preparedStatement.setString(2, deviceId);
-			preparedStatement.setString(3, timezone);
-			connection.executeUpdate(preparedStatement);
-
-			// Create the new Image object
+			// Create the display name
 			String name = displayName;
 			if (name == null || name.isEmpty()) {
 				if (imagePaths.size() > 0) {
@@ -5985,7 +6929,28 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 				} else {
 					name = "";
 				}
-			}			
+			}
+
+			// Create a host if needed
+			if (host == null) {
+				if (name.isEmpty()) {
+					host = getHostManager().newHost("Image_" + newObjId + " Host", transaction);
+				} else {
+					host = getHostManager().newHost(name + "_" + newObjId + " Host", transaction);
+				}
+			}
+
+			// Add a row to data_source_info
+			preparedStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_DATA_SOURCE_INFO);
+			statement = connection.createStatement();
+			preparedStatement.setLong(1, newObjId);
+			preparedStatement.setString(2, deviceId);
+			preparedStatement.setString(3, timezone);
+			preparedStatement.setLong(4, new Date().getTime());
+			preparedStatement.setLong(5, host.getHostId());
+			connection.executeUpdate(preparedStatement);
+
+			// Create the new Image object
 			return new Image(this, newObjId, type.getValue(), deviceId, sectorSize, name,
 					imagePaths.toArray(new String[imagePaths.size()]), timezone, md5, sha1, sha256, savedSize);
 		} catch (SQLException ex) {
@@ -5996,7 +6961,6 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 			}
 		} finally {
 			closeStatement(statement);
-			releaseSingleUserCaseWriteLock();
 		}
 	}
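Similarly, a hedged sketch of calling the new addImage() overload with an explicit Host; the image path, device id, and use of format auto-detection are illustrative assumptions:

import java.util.Arrays;
import org.sleuthkit.datamodel.*;

class AddImageSketch {
	static Image addRawImage(SleuthkitCase caseDb, Host host) throws TskCoreException {
		SleuthkitCase.CaseDbTransaction trans = caseDb.beginTransaction();
		try {
			Image img = caseDb.addImage(
					TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_DETECT, // let TSK detect the format
					512,                                // sector size
					0,                                  // total size, if not yet known
					"disk.dd",                          // display name
					Arrays.asList("/evidence/disk.dd"), // hypothetical image path
					"GMT",
					null, null, null,                   // md5, sha1, sha256 not yet computed
					"11111111-2222-3333-4444-555555555555",
					host,                               // may be null; a host is then created
					trans);
			trans.commit();
			return img;
		} catch (TskCoreException ex) {
			trans.rollback();
			throw ex;
		}
	}
}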
 
@@ -6015,7 +6979,6 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 	 */
 	public VolumeSystem addVolumeSystem(long parentObjId, TskData.TSK_VS_TYPE_ENUM type, long imgOffset,
 			long blockSize, CaseDbTransaction transaction) throws TskCoreException {
-		acquireSingleUserCaseWriteLock();
 		try {
 			// Insert a row for the VolumeSystem into the tsk_objects table.
 			CaseDbConnection connection = transaction.getConnection();
@@ -6036,8 +6999,6 @@ public VolumeSystem addVolumeSystem(long parentObjId, TskData.TSK_VS_TYPE_ENUM t
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error creating volume system with parent ID %d and image offset %d",
 					parentObjId, imgOffset), ex);
-		} finally {
-			releaseSingleUserCaseWriteLock();
 		}
 	}
 
@@ -6058,8 +7019,6 @@ public VolumeSystem addVolumeSystem(long parentObjId, TskData.TSK_VS_TYPE_ENUM t
 	 */
 	public Volume addVolume(long parentObjId, long addr, long start, long length, String desc,
 			long flags, CaseDbTransaction transaction) throws TskCoreException {
-		acquireSingleUserCaseWriteLock();
-		Statement statement = null;
 		try {
 			// Insert a row for the Volume into the tsk_objects table.
 			CaseDbConnection connection = transaction.getConnection();
@@ -6086,9 +7045,6 @@ public Volume addVolume(long parentObjId, long addr, long start, long length, St
 			return new Volume(this, newObjId, addr, start, length, flags, desc);
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error creating volume with address %d and parent ID %d", addr, parentObjId), ex);
-		} finally {
-			closeStatement(statement);
-			releaseSingleUserCaseWriteLock();
 		}
 	}
 
@@ -6104,8 +7060,6 @@ public Volume addVolume(long parentObjId, long addr, long start, long length, St
 	 * @throws TskCoreException
 	 */
 	public Pool addPool(long parentObjId, TskData.TSK_POOL_TYPE_ENUM type, CaseDbTransaction transaction) throws TskCoreException {
-		acquireSingleUserCaseWriteLock();
-		Statement statement = null;
 		try {
 			// Insert a row for the Pool into the tsk_objects table.
 			CaseDbConnection connection = transaction.getConnection();
@@ -6123,9 +7077,6 @@ public Pool addPool(long parentObjId, TskData.TSK_POOL_TYPE_ENUM type, CaseDbTra
 			return new Pool(this, newObjId, type.getName(), type.getValue());
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error creating pool with type %d and parent ID %d", type.getValue(), parentObjId), ex);
-		} finally {
-			closeStatement(statement);
-			releaseSingleUserCaseWriteLock();
 		}
 	}
 
@@ -6150,8 +7101,6 @@ public Pool addPool(long parentObjId, TskData.TSK_POOL_TYPE_ENUM type, CaseDbTra
 	public FileSystem addFileSystem(long parentObjId, long imgOffset, TskData.TSK_FS_TYPE_ENUM type, long blockSize, long blockCount,
 			long rootInum, long firstInum, long lastInum, String displayName,
 			CaseDbTransaction transaction) throws TskCoreException {
-		acquireSingleUserCaseWriteLock();
-		Statement statement = null;
 		try {
 			// Insert a row for the FileSystem into the tsk_objects table.
 			CaseDbConnection connection = transaction.getConnection();
@@ -6182,9 +7131,6 @@ public FileSystem addFileSystem(long parentObjId, long imgOffset, TskData.TSK_FS
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error creating file system with image offset %d and parent ID %d",
 					imgOffset, parentObjId), ex);
-		} finally {
-			closeStatement(statement);
-			releaseSingleUserCaseWriteLock();
 		}
 	}
 
@@ -6221,10 +7167,81 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 			long ctime, long crtime, long atime, long mtime,
 			boolean isFile, Content parent) throws TskCoreException {
 
+		CaseDbTransaction transaction = beginTransaction();
+		try {
+
+			FsContent fileSystemFile = addFileSystemFile(dataSourceObjId, fsObjId, fileName,
+					metaAddr, metaSeq, attrType, attrId, dirFlag, metaFlags, size,
+					ctime, crtime, atime, mtime, null, null, null, isFile, parent,
+					OsAccount.NO_OWNER_ID, null,
+					Collections.emptyList(), transaction);
+
+			transaction.commit();
+			transaction = null;
+			return fileSystemFile;
+		} finally {
+			if (null != transaction) {
+				try {
+					transaction.rollback();
+				} catch (TskCoreException ex2) {
+					logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
+				}
+			}
+		}
+	}
+
+	/**
+	 * Add a file system file.
+	 *
+	 * @param dataSourceObjId The object id of the root data source of this
+	 *                        file.
+	 * @param fsObjId         The file system object id.
+	 * @param fileName        The name of the file.
+	 * @param metaAddr        The meta address of the file.
+	 * @param metaSeq         The meta address sequence of the file.
+	 * @param attrType        The attribute type of the file.
+	 * @param attrId          The attribute id.
+	 * @param dirFlag         The allocated status from the name structure
+	 * @param metaFlags       The allocated status of the file, usually as
+	 *                        reported in the metadata structure of the file
+	 *                        system.
+	 * @param size            The size of the file in bytes.
+	 * @param ctime           The changed time of the file.
+	 * @param crtime          The creation time of the file.
+	 * @param atime           The accessed time of the file
+	 * @param mtime           The modified time of the file.
+	 * @param md5Hash         The MD5 hash of the file
+	 * @param sha256Hash      The SHA256 hash of the file
+	 * @param mimeType        The MIME type of the file
+	 * @param isFile          True, unless the file is a directory.
+	 * @param parent          The parent of the file (e.g., a virtual
+	 *                        directory).
+	 * @param ownerUid        UID of the file owner as found in the file system,
+	 *                        can be null.
+	 * @param osAccount       OS account of owner, may be null.
+	 * @param fileAttributes  A list of file attributes. May be empty.
+	 * @param transaction     A caller-managed transaction within which the add
+	 *                        file operations are performed.
+	 *
+	 * @return Newly created file
+	 *
+	 * @throws TskCoreException
+	 */
+	public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
+			String fileName,
+			long metaAddr, int metaSeq,
+			TSK_FS_ATTR_TYPE_ENUM attrType, int attrId,
+			TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size,
+			long ctime, long crtime, long atime, long mtime,
+			String md5Hash, String sha256Hash, String mimeType,
+			boolean isFile, Content parent, String ownerUid,
+			OsAccount osAccount, List<Attribute> fileAttributes,
+			CaseDbTransaction transaction) throws TskCoreException {
+
 		TimelineManager timelineManager = getTimelineManager();
 
-		CaseDbTransaction transaction = beginTransaction();
 		Statement queryStatement = null;
+		String parentPath = "/";
 		try {
 			CaseDbConnection connection = transaction.getConnection();
 
@@ -6232,8 +7249,6 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 			// INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
 			long objectId = addObject(parent.getId(), TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);
 
-			String parentPath;
-
 			if (parent instanceof AbstractFile) {
 				AbstractFile parentFile = (AbstractFile) parent;
 				if (isRootDirectory(parentFile, transaction)) {
@@ -6268,40 +7283,49 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 			statement.setLong(17, crtime);
 			statement.setLong(18, atime);
 			statement.setLong(19, mtime);
-			statement.setString(20, parentPath);
+			statement.setString(20, md5Hash);
+			statement.setString(21, sha256Hash);
+			statement.setString(22, mimeType);
+			statement.setString(23, parentPath);
 			final String extension = extractExtension(fileName);
-			statement.setString(21, extension);
+			statement.setString(24, extension);
+			statement.setString(25, ownerUid);
+			if (null != osAccount) {
+				statement.setLong(26, osAccount.getId());
+			} else {
+				statement.setNull(26, java.sql.Types.BIGINT); // osAccountObjId
+			}
 
 			connection.executeUpdate(statement);
 
+			Long osAccountId = (osAccount != null) ? osAccount.getId() : null;
 			DerivedFile derivedFile = new DerivedFile(this, objectId, dataSourceObjId, fileName, dirType, metaType, dirFlag, metaFlags,
-					size, ctime, crtime, atime, mtime, null, null, null, parentPath, null, parent.getId(), null, null, extension);
+					size, ctime, crtime, atime, mtime, md5Hash, sha256Hash, null, parentPath, null, parent.getId(), mimeType, null, extension, ownerUid, osAccountId);
 
 			timelineManager.addEventsForNewFile(derivedFile, connection);
 
-			transaction.commit();
-			transaction = null;
+			for (Attribute fileAttribute : fileAttributes) {
+				fileAttribute.setAttributeParentId(objectId);
+				fileAttribute.setCaseDatabase(this);
+				addFileAttribute(fileAttribute, connection);
+			}
+
+			if (osAccount != null) {
+				osAccountManager.newOsAccountInstance(osAccount.getId(), dataSourceObjId, OsAccountInstance.OsAccountInstanceType.LAUNCHED, connection);
+			}
 
 			return new org.sleuthkit.datamodel.File(this, objectId, dataSourceObjId, fsObjId,
 					attrType, attrId, fileName, metaAddr, metaSeq,
 					dirType, metaType, dirFlag, metaFlags,
 					size, ctime, crtime, atime, mtime,
-					(short) 0, 0, 0, null, null, null, parentPath, null,
-					extension);
+					(short) 0, 0, 0, md5Hash, sha256Hash, null, parentPath, mimeType,
+					extension, ownerUid, osAccountId, fileAttributes);
 
 		} catch (SQLException ex) {
-			logger.log(Level.WARNING, "Failed to add file system file", ex);
+			throw new TskCoreException(String.format("Failed to INSERT file system file %s (%s) with parent id %d in tsk_files table", fileName, parentPath, parent.getId()), ex);
 		} finally {
 			closeStatement(queryStatement);
-			if (null != transaction) {
-				try {
-					transaction.rollback();
-				} catch (TskCoreException ex2) {
-					logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
-				}
-			}
 		}
-		return null;
 	}
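A sketch of the new transaction-based addFileSystemFile() overload; the file name, meta address, sizes, and times are made up, and the enum choices are only one plausible combination:

import java.util.Collections;
import org.sleuthkit.datamodel.*;
import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM;
import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;

class AddFileSystemFileSketch {
	static FsContent addOne(SleuthkitCase caseDb, FileSystem fs, AbstractFile parentDir) throws TskCoreException {
		SleuthkitCase.CaseDbTransaction trans = caseDb.beginTransaction();
		try {
			FsContent file = caseDb.addFileSystemFile(
					parentDir.getDataSourceObjectId(), fs.getId(), "report.txt",
					100L, 0,                                        // meta address and sequence
					TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0,
					TSK_FS_NAME_FLAG_ENUM.ALLOC,
					TSK_FS_META_FLAG_ENUM.ALLOC.getValue(),
					1024L,                                          // size in bytes
					0L, 0L, 0L, 0L,                                 // ctime, crtime, atime, mtime
					null, null, "text/plain",                       // md5, sha256, MIME type
					true, parentDir,
					OsAccount.NO_OWNER_ID, null,                    // no owner information
					Collections.emptyList(), trans);
			trans.commit();
			return file;
		} catch (TskCoreException ex) {
			trans.rollback();
			throw ex;
		}
	}
}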
 
 	/**
@@ -6313,11 +7337,12 @@ public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId,
 	 * @throws org.sleuthkit.datamodel.TskCoreException
 	 */
 	public List<VirtualDirectory> getVirtualDirectoryRoots() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_files WHERE" //NON-NLS
 					+ " type = " + TskData.TSK_DB_FILES_TYPE_ENUM.VIRTUAL_DIR.getFileType()
@@ -6333,7 +7358,7 @@ public List<VirtualDirectory> getVirtualDirectoryRoots() throws TskCoreException
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -6383,8 +7408,9 @@ public final List<LayoutFile> addLayoutFiles(Content parent, List<TskFileRange>
 				 * INSERT INTO tsk_files (obj_id, fs_obj_id, name, type,
 				 * has_path, dir_type, meta_type, dir_flags, meta_flags, size,
 				 * ctime, crtime, atime, mtime, md5, known, mime_type,
-				 * parent_path, data_source_obj_id,extension) VALUES (?, ?, ?,
-				 * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?)
+				 * parent_path, data_source_obj_id,extension, owner_uid,
+				 * os_account_obj_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
+				 * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?)
 				 */
 				PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 				prepStmt.clearParameters();
@@ -6411,6 +7437,10 @@ public final List<LayoutFile> addLayoutFiles(Content parent, List<TskFileRange>
 
 				//extension, since this is not a FS file we just set it to null
 				prepStmt.setString(21, null);
+
+				prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
+				prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
+
 				connection.executeUpdate(prepStmt);
 
 				/*
@@ -6443,7 +7473,9 @@ public final List<LayoutFile> addLayoutFiles(Content parent, List<TskFileRange>
 						null, null,
 						FileKnown.UNKNOWN,
 						parent.getUniquePath(),
-						null));
+						null,
+						OsAccount.NO_OWNER_ID,
+						OsAccount.NO_ACCOUNT));
 			}
 
 			transaction.commit();
@@ -6493,10 +7525,7 @@ public final List<LayoutFile> addCarvedFiles(CarvingResult carvingResult) throws
 		CaseDbTransaction transaction = null;
 		Statement statement = null;
 		ResultSet resultSet = null;
-		long newCacheKey = 0; // Used to roll back cache if transaction is rolled back.
 		try {
-			transaction = beginTransaction();
-			CaseDbConnection connection = transaction.getConnection();
 
 			/*
 			 * Carved files are "re-parented" as children of the $CarvedFiles
@@ -6519,39 +7548,43 @@ public final List<LayoutFile> addCarvedFiles(CarvingResult carvingResult) throws
 			 * Get or create the $CarvedFiles virtual directory for the root
 			 * ancestor.
 			 */
-			VirtualDirectory carvedFilesDir = rootIdsToCarvedFileDirs.get(root.getId());
-			if (null == carvedFilesDir) {
-				List<Content> rootChildren;
-				if (root instanceof FileSystem) {
-					rootChildren = ((FileSystem) root).getRootDirectory().getChildren();
-				} else {
-					rootChildren = root.getChildren();
-				}
-				for (Content child : rootChildren) {
-					if (child instanceof VirtualDirectory && child.getName().equals(VirtualDirectory.NAME_CARVED)) {
-						carvedFilesDir = (VirtualDirectory) child;
-						break;
-					}
-				}
+			VirtualDirectory carvedFilesDir;
+			synchronized(carvedFileDirsLock) {
+				carvedFilesDir = rootIdsToCarvedFileDirs.get(root.getId());
 				if (null == carvedFilesDir) {
-					long parId = root.getId();
-					// $CarvedFiles should be a child of the root directory, not the file system
+					List<Content> rootChildren;
 					if (root instanceof FileSystem) {
-						Content rootDir = ((FileSystem) root).getRootDirectory();
-						parId = rootDir.getId();
+						rootChildren = ((FileSystem) root).getRootDirectory().getChildren();
+					} else {
+						rootChildren = root.getChildren();
+					}
+					for (Content child : rootChildren) {
+						if (child instanceof VirtualDirectory && child.getName().equals(VirtualDirectory.NAME_CARVED)) {
+							carvedFilesDir = (VirtualDirectory) child;
+							break;
+						}
+					}
+					if (null == carvedFilesDir) {
+						long parId = root.getId();
+						// $CarvedFiles should be a child of the root directory, not the file system
+						if (root instanceof FileSystem) {
+							Content rootDir = ((FileSystem) root).getRootDirectory();
+							parId = rootDir.getId();
+						}
+						carvedFilesDir = addVirtualDirectory(parId, VirtualDirectory.NAME_CARVED);
 					}
-					carvedFilesDir = addVirtualDirectory(parId, VirtualDirectory.NAME_CARVED, transaction);
+					rootIdsToCarvedFileDirs.put(root.getId(), carvedFilesDir);
 				}
-				newCacheKey = root.getId();
-				rootIdsToCarvedFileDirs.put(newCacheKey, carvedFilesDir);
 			}
 
 			/*
 			 * Add the carved files to the database as children of the
 			 * $CarvedFile directory of the root ancestor.
 			 */
+			transaction = beginTransaction();
+			CaseDbConnection connection = transaction.getConnection();
 			String parentPath = getFileParentPath(carvedFilesDir.getId(), connection) + carvedFilesDir.getName() + "/";
-			List<LayoutFile> carvedFiles = new ArrayList<LayoutFile>();
+			List<LayoutFile> carvedFiles = new ArrayList<>();
 			for (CarvingResult.CarvedFile carvedFile : carvingResult.getCarvedFiles()) {
 				/*
 				 * Insert a row for the carved file into the tsk_objects table:
@@ -6564,8 +7597,9 @@ public final List<LayoutFile> addCarvedFiles(CarvingResult carvingResult) throws
 				 * INSERT INTO tsk_files (obj_id, fs_obj_id, name, type,
 				 * has_path, dir_type, meta_type, dir_flags, meta_flags, size,
 				 * ctime, crtime, atime, mtime, md5, known, mime_type,
-				 * parent_path, data_source_obj_id,extenion) VALUES (?, ?, ?, ?,
-				 * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?)
+				 * parent_path, data_source_obj_id, extension, owner_uid,
+				 * os_account_obj_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,
+				 * ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 				 */
 				PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 				prepStmt.clearParameters();
@@ -6594,6 +7628,10 @@ public final List<LayoutFile> addCarvedFiles(CarvingResult carvingResult) throws
 				prepStmt.setString(19, parentPath); // parent path
 				prepStmt.setLong(20, carvedFilesDir.getDataSourceObjectId()); // data_source_obj_id
 				prepStmt.setString(21, extractExtension(carvedFile.getName())); //extension
+
+				prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
+				prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
+
 				connection.executeUpdate(prepStmt);
 
 				/*
@@ -6628,7 +7666,9 @@ public final List<LayoutFile> addCarvedFiles(CarvingResult carvingResult) throws
 						null, null,
 						FileKnown.UNKNOWN,
 						parentPath,
-						null));
+						null,
+						OsAccount.NO_OWNER_ID,
+						OsAccount.NO_ACCOUNT));
 			}
 
 			transaction.commit();
@@ -6647,9 +7687,6 @@ public final List<LayoutFile> addCarvedFiles(CarvingResult carvingResult) throws
 				} catch (TskCoreException ex2) {
 					logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
 				}
-				if (0 != newCacheKey) {
-					rootIdsToCarvedFileDirs.remove(newCacheKey);
-				}
 			}
 		}
 	}
@@ -6689,12 +7726,31 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 			boolean isFile, Content parentObj,
 			String rederiveDetails, String toolName, String toolVersion,
 			String otherDetails, TskData.EncodingType encodingType) throws TskCoreException {
+		CaseDbTransaction transaction = beginTransaction();
+		try {
+			DerivedFile df = addDerivedFile(fileName, localPath,
+					size, ctime, crtime, atime, mtime,
+					isFile, parentObj,
+					rederiveDetails, toolName, toolVersion,
+					otherDetails, encodingType, transaction);
+			transaction.commit();
+			return df;
+		} catch (TskCoreException ex) {
+			transaction.rollback();
+			throw ex;
+		}
+	}
+	
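+	/**
+	 * Adds a derived file to the case database within a caller-managed
+	 * transaction. The caller is responsible for committing or rolling back
+	 * the transaction. Parameters match the auto-commit variant above, with
+	 * the addition of the transaction in whose scope the file is added.
+	 */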
+	public DerivedFile addDerivedFile(String fileName, String localPath,
+			long size, long ctime, long crtime, long atime, long mtime,
+			boolean isFile, Content parentObj,
+			String rederiveDetails, String toolName, String toolVersion,
+			String otherDetails, TskData.EncodingType encodingType, CaseDbTransaction transaction) throws TskCoreException {
 		// Strip off any leading slashes from the local path (leading slashes indicate absolute paths)
 		localPath = localPath.replaceAll("^[/\\\\]+", "");
 
 		TimelineManager timelineManager = getTimelineManager();
 
-		CaseDbTransaction transaction = beginTransaction();
 		CaseDbConnection connection = transaction.getConnection();
 		try {
 			final long parentId = parentObj.getId();
@@ -6765,29 +7821,29 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 			statement.setString(19, parentPath);
 
 			// root data source object id
-			long dataSourceObjId = getDataSourceObjectId(connection, parentId);
+			long dataSourceObjId = getDataSourceObjectId(connection, parentObj);
 			statement.setLong(20, dataSourceObjId);
 			final String extension = extractExtension(fileName);
 			//extension
 			statement.setString(21, extension);
 
+			statement.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
+			statement.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
+
 			connection.executeUpdate(statement);
 
 			//add localPath
 			addFilePath(connection, newObjId, localPath, encodingType);
 
 			DerivedFile derivedFile = new DerivedFile(this, newObjId, dataSourceObjId, fileName, dirType, metaType, dirFlag, metaFlags,
-					savedSize, ctime, crtime, atime, mtime, null, null, null, parentPath, localPath, parentId, null, encodingType, extension);
+					savedSize, ctime, crtime, atime, mtime, null, null, null, parentPath, localPath, parentId, null, encodingType, extension, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT);
 
 			timelineManager.addEventsForNewFile(derivedFile, connection);
-			transaction.commit();
+
 			//TODO add derived method to tsk_files_derived and tsk_files_derived_method
 			return derivedFile;
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
 			throw new TskCoreException("Failed to add derived file to case database", ex);
-		} finally {
-			connection.close();
 		}
 	}
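A sketch of batching several derived files under one caller-managed transaction, which is what the new overload enables; the tool name, paths, and sizes are placeholders:

import org.sleuthkit.datamodel.*;

class AddDerivedFilesSketch {
	static void addBatch(SleuthkitCase caseDb, AbstractFile parent) throws TskCoreException {
		SleuthkitCase.CaseDbTransaction trans = caseDb.beginTransaction();
		try {
			for (int i = 0; i < 3; i++) {
				caseDb.addDerivedFile(
						"page_" + i + ".txt",
						"ModuleOutput/extractor/page_" + i + ".txt", // relative local path; leading slashes are stripped
						100L, 0L, 0L, 0L, 0L,                        // size and timestamps
						true, parent,
						"", "TextExtractor", "1.0", "",              // rederive details, tool name/version, other details
						TskData.EncodingType.NONE, trans);
			}
			trans.commit();                                          // one commit for the whole batch
		} catch (TskCoreException ex) {
			trans.rollback();
			throw ex;
		}
	}
}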
 
@@ -6827,15 +7883,38 @@ public DerivedFile updateDerivedFile(DerivedFile derivedFile, String localPath,
 			String rederiveDetails, String toolName, String toolVersion,
 			String otherDetails, TskData.EncodingType encodingType) throws TskCoreException {
 
+		CaseDbTransaction trans = null;
+		try {
+			Content parentObj = derivedFile.getParent();
+			
+			trans = beginTransaction();
+			DerivedFile updatedFile = updateDerivedFile(derivedFile, localPath,
+					size, ctime, crtime, atime, mtime,
+					isFile, mimeType,
+					rederiveDetails, toolName, toolVersion,
+					otherDetails, encodingType, parentObj, trans);
+			trans.commit();
+			return updatedFile;
+		} catch (TskCoreException ex) {
+			if (trans != null) {
+				trans.rollback();
+			}
+			throw ex;
+		}
+	}		
+		
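+	/**
+	 * Updates an existing derived file in the case database within a
+	 * caller-managed transaction. The caller is responsible for committing or
+	 * rolling back the transaction, and supplies the parent of the derived
+	 * file along with the transaction.
+	 */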
+	public DerivedFile updateDerivedFile(DerivedFile derivedFile, String localPath,
+			long size, long ctime, long crtime, long atime, long mtime,
+			boolean isFile, String mimeType,
+			String rederiveDetails, String toolName, String toolVersion,
+			String otherDetails, TskData.EncodingType encodingType, 
+			Content parentObj, CaseDbTransaction trans) throws TskCoreException {		
+		
 		// Strip off any leading slashes from the local path (leading slashes indicate absolute paths)
 		localPath = localPath.replaceAll("^[/\\\\]+", "");
-
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseWriteLock();
+		
 		ResultSet rs = null;
 		try {
-			Content parentObj = derivedFile.getParent();
-			connection.beginTransaction();
 			final long parentId = parentObj.getId();
 			String parentPath = "";
 			if (parentObj instanceof BlackboardArtifact) {
@@ -6845,7 +7924,7 @@ public DerivedFile updateDerivedFile(DerivedFile derivedFile, String localPath,
 			}
 			// UPDATE tsk_files SET type = ?, dir_type = ?, meta_type = ?, dir_flags = ?,  meta_flags = ?, "
 			// + "size= ?, ctime= ?, crtime= ?, atime= ?, mtime= ?, mime_type = ? WHERE obj_id = ?"), //NON-NLS
-			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_DERIVED_FILE);
+			PreparedStatement statement = trans.getConnection().getPreparedStatement(PREPARED_STATEMENT.UPDATE_DERIVED_FILE);
 			statement.clearParameters();
 
 			//type
@@ -6877,24 +7956,19 @@ public DerivedFile updateDerivedFile(DerivedFile derivedFile, String localPath,
 			statement.setLong(10, mtime);
 			statement.setString(11, mimeType);
 			statement.setString(12, String.valueOf(derivedFile.getId()));
-			connection.executeUpdate(statement);
+			trans.getConnection().executeUpdate(statement);
 
 			//add localPath
-			updateFilePath(connection, derivedFile.getId(), localPath, encodingType);
-
-			connection.commitTransaction();
+			updateFilePath(trans.getConnection(), derivedFile.getId(), localPath, encodingType);
 
-			long dataSourceObjId = getDataSourceObjectId(connection, parentId);
+			long dataSourceObjId = getDataSourceObjectId(trans.getConnection(), parentObj);
 			final String extension = extractExtension(derivedFile.getName());
 			return new DerivedFile(this, derivedFile.getId(), dataSourceObjId, derivedFile.getName(), dirType, metaType, dirFlag, metaFlags,
-					savedSize, ctime, crtime, atime, mtime, null, null, null, parentPath, localPath, parentId, null, encodingType, extension);
+					savedSize, ctime, crtime, atime, mtime, null, null, null, parentPath, localPath, parentId, null, encodingType, extension, derivedFile.getOwnerUid().orElse(null), derivedFile.getOsAccountObjectId().orElse(null));
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
 			throw new TskCoreException("Failed to add derived file to case database", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
-			releaseSingleUserCaseWriteLock();
 		}
 	}
 
@@ -6974,7 +8048,6 @@ public LocalFile addLocalFile(String fileName, String localPath,
 				isFile, encodingType,
 				parent, transaction);
 	}
-
 	/**
 	 * Adds a local/logical file to the case database. The database operations
 	 * are done within a caller-managed transaction; the caller is responsible
@@ -6989,6 +8062,7 @@ public LocalFile addLocalFile(String fileName, String localPath,
 	 * @param atime        The accessed time of the file
 	 * @param mtime        The modified time of the file.
 	 * @param md5          The MD5 hash of the file
+	 * @param sha256       The SHA-256 hash of the file.
 	 * @param known        The known status of the file (can be null)
 	 * @param mimeType     The MIME type of the file
 	 * @param isFile       True, unless the file is a directory.
@@ -7007,6 +8081,47 @@ public LocalFile addLocalFile(String fileName, String localPath,
 			String md5, String sha256, FileKnown known, String mimeType,
 			boolean isFile, TskData.EncodingType encodingType,
 			Content parent, CaseDbTransaction transaction) throws TskCoreException {
+		
+		return addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime,
+				md5, sha256, known, mimeType, isFile, encodingType,
+				OsAccount.NO_ACCOUNT, OsAccount.NO_OWNER_ID, parent, transaction);
+		
+	}
+
+	/**
+	 * Adds a local/logical file to the case database. The database operations
+	 * are done within a caller-managed transaction; the caller is responsible
+	 * for committing or rolling back the transaction.
+	 *
+	 * @param fileName     The name of the file.
+	 * @param localPath    The absolute path (including the file name) of the
+	 *                     local/logical file in secondary storage.
+	 * @param size         The size of the file in bytes.
+	 * @param ctime        The changed time of the file.
+	 * @param crtime       The creation time of the file.
+	 * @param atime        The accessed time of the file
+	 * @param mtime        The modified time of the file.
+	 * @param md5          The MD5 hash of the file
+	 * @param sha256       The SHA-256 hash of the file.
+	 * @param known        The known status of the file (can be null)
+	 * @param mimeType     The MIME type of the file
+	 * @param isFile       True, unless the file is a directory.
+	 * @param encodingType Type of encoding used on the file
+	 * @param osAccountId  OS account id (can be null)
+	 * @param ownerAccount Owner account (can be null)
+	 * @param parent       The parent of the file (e.g., a virtual directory)
+	 * @param transaction  A caller-managed transaction within which the add
+	 *                     file operations are performed.
+	 *
+	 * @return An object representing the local/logical file.
+	 *
+	 * @throws TskCoreException if there is an error completing a case database
+	 *                          operation.
+	 */
+	public LocalFile addLocalFile(String fileName, String localPath,
+			long size, long ctime, long crtime, long atime, long mtime,
+			String md5, String sha256, FileKnown known, String mimeType,
+			boolean isFile, TskData.EncodingType encodingType, Long osAccountId, String ownerAccount,
+			Content parent, CaseDbTransaction transaction) throws TskCoreException {
 		CaseDbConnection connection = transaction.getConnection();
 		Statement queryStatement = null;
 		try {
@@ -7018,8 +8133,8 @@ public LocalFile addLocalFile(String fileName, String localPath,
 			// Insert a row for the local/logical file into the tsk_files table.
 			// INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type,
 			// dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, known, mime_type,
-			// parent_path, data_source_obj_id,extension)
-			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?)
+			// parent_path, data_source_obj_id, extension, owner_uid, os_account_obj_id)
+			// VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?,?, ?, ?)
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 			statement.clearParameters();
 			statement.setLong(1, objectId);
@@ -7063,13 +8178,25 @@ public LocalFile addLocalFile(String fileName, String localPath,
 				dataSourceObjId = parentFile.getDataSourceObjectId();
 			} else {
 				parentPath = "/";
-				dataSourceObjId = getDataSourceObjectId(connection, parent.getId());
+				dataSourceObjId = getDataSourceObjectId(connection, parent);
 			}
 			statement.setString(19, parentPath);
 			statement.setLong(20, dataSourceObjId);
 			final String extension = extractExtension(fileName);
 			statement.setString(21, extension);
 
+			if (ownerAccount != null) {
+				statement.setString(22, ownerAccount); // ownerUid
+			} else {
+				statement.setNull(22, java.sql.Types.VARCHAR);
+			}
+			
+			if (osAccountId != null) {
+				statement.setLong(23, osAccountId); // osAccountObjId
+			} else {
+				statement.setNull(23, java.sql.Types.BIGINT);
+			}
+
 			connection.executeUpdate(statement);
 			addFilePath(connection, objectId, localPath, encodingType);
 			LocalFile localFile = new LocalFile(this,
@@ -7086,7 +8213,8 @@ public LocalFile addLocalFile(String fileName, String localPath,
 					parent.getId(), parentPath,
 					dataSourceObjId,
 					localPath,
-					encodingType, extension);
+					encodingType, extension,
+					ownerAccount, osAccountId);
 			getTimelineManager().addEventsForNewFile(localFile, connection);
 			return localFile;
 
@@ -7097,6 +8225,53 @@ public LocalFile addLocalFile(String fileName, String localPath,
 		}
 	}
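A sketch of the new addLocalFile() overload that records owner and OS account information; the file path, owner SID, and OS account object id are placeholders supplied by the caller:

import org.sleuthkit.datamodel.*;

class AddLocalFileSketch {
	static LocalFile addOne(SleuthkitCase caseDb, Content parentDir, Long osAccountObjId) throws TskCoreException {
		SleuthkitCase.CaseDbTransaction trans = caseDb.beginTransaction();
		try {
			LocalFile file = caseDb.addLocalFile(
					"notes.txt", "/evidence/export/notes.txt",       // name and local path
					2048L, 0L, 0L, 0L, 0L,                           // size and timestamps
					null, null,                                      // md5, sha256 not computed
					TskData.FileKnown.UNKNOWN, "text/plain",
					true, TskData.EncodingType.NONE,
					osAccountObjId,                                  // may be null
					"S-1-5-21-1004336348-1177238915-682003330-1001", // owner account, may be null
					parentDir, trans);
			trans.commit();
			return file;
		} catch (TskCoreException ex) {
			trans.rollback();
			throw ex;
		}
	}
}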
 
+	/**
+	 * Utility class to create keys for the cache used in isRootDirectory(). The
+	 * dataSourceId must be set but the fileSystemId can be null (for local
+	 * directories, for example).
+	 */
+	private class RootDirectoryKey {
+
+		private long dataSourceId;
+		private Long fileSystemId;
+
+		RootDirectoryKey(long dataSourceId, Long fileSystemId) {
+			this.dataSourceId = dataSourceId;
+			this.fileSystemId = fileSystemId;
+		}
+
+		@Override
+		public int hashCode() {
+			int hash = 7;
+			hash = 41 * hash + Objects.hashCode(dataSourceId);
+			hash = 41 * hash + Objects.hashCode(fileSystemId);
+			return hash;
+		}
+
+		@Override
+		public boolean equals(Object obj) {
+			if (this == obj) {
+				return true;
+			}
+			if (obj == null) {
+				return false;
+			}
+			if (getClass() != obj.getClass()) {
+				return false;
+			}
+
+			RootDirectoryKey otherKey = (RootDirectoryKey) obj;
+			if (dataSourceId != otherKey.dataSourceId) {
+				return false;
+			}
+
+			if (fileSystemId != null) {
+				return fileSystemId.equals(otherKey.fileSystemId);
+			}
+			return (otherKey.fileSystemId == null);
+		}
+	}
+
 	/**
 	 * Check whether a given AbstractFile is the "root" directory. True if the
 	 * AbstractFile either has no parent or its parent is an image, volume,
@@ -7110,6 +8285,29 @@ public LocalFile addLocalFile(String fileName, String localPath,
 	 * @throws TskCoreException
 	 */
 	private boolean isRootDirectory(AbstractFile file, CaseDbTransaction transaction) throws TskCoreException {
+
+		// First check if we already know the root directory for this data source and,
+		// optionally, file system. There is only one root, so if we know it we can
+		// simply compare this file's object ID to the known root directory's ID.
+		Long fsObjId = null;
+		if (file instanceof FsContent) {
+			fsObjId = ((FsContent) file).getFileSystemId();
+		}
+		RootDirectoryKey key = new RootDirectoryKey(file.getDataSourceObjectId(), fsObjId);
+		synchronized (rootDirectoryMapLock) {
+			if (rootDirectoryMap.containsKey(key)) {
+				return rootDirectoryMap.get(key).equals(file.getId());
+			}
+		}
+
+		// Fallback cache. We store the result of each database lookup 
+		// so it won't be done multiple times in a row. In practice, this will
+		// only be used if this method was never called on the root directory. 
+		Boolean isRoot = isRootDirectoryCache.getIfPresent(file.getId());
+		if (isRoot != null) {
+			return isRoot;
+		}
+
 		CaseDbConnection connection = transaction.getConnection();
 		Statement statement = null;
 		ResultSet resultSet = null;
@@ -7127,13 +8325,28 @@ private boolean isRootDirectory(AbstractFile file, CaseDbTransaction transaction
 					return true;
 				}
 				int type = resultSet.getInt("parent_type");
-				return (type == TskData.ObjectType.IMG.getObjectType()
+				boolean result = type == TskData.ObjectType.IMG.getObjectType()
 						|| type == TskData.ObjectType.VS.getObjectType()
 						|| type == TskData.ObjectType.VOL.getObjectType()
-						|| type == TskData.ObjectType.FS.getObjectType());
+						|| type == TskData.ObjectType.FS.getObjectType();
+				if (result == true) {
+					synchronized (rootDirectoryMapLock) {
+						// This is a root directory so save it
+						rootDirectoryMap.put(key, file.getId());
+					}
+				}
+				isRootDirectoryCache.put(file.getId(), result);
+				return result;
 
 			} else {
+				// This is a root directory so save it
+				synchronized (rootDirectoryMapLock) {
+					rootDirectoryMap.put(key, file.getId());
+				}
+				isRootDirectoryCache.put(file.getId(), true);
+
 				return true; // The file has no parent
+				
 			}
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Failed to lookup parent of file (%s) with id %d", file.getName(), file.getId()), ex);
@@ -7198,8 +8411,8 @@ public LayoutFile addLayoutFile(String fileName,
 			 * tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type,
 			 * meta_type, dir_flags, meta_flags, size, ctime, crtime, atime,
 			 * mtime, md5, known, mime_type, parent_path,
-			 * data_source_obj_id,extenion) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?,
-			 * ?, ?, ?, ?, ?, ?, ?,?)
+			 * data_source_obj_id, extension, owner_uid, os_account_obj_id) VALUES
+			 * (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
 			 */
 			PreparedStatement prepStmt = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE);
 			prepStmt.clearParameters();
@@ -7238,6 +8451,10 @@ public LayoutFile addLayoutFile(String fileName,
 			prepStmt.setLong(20, parent.getDataSource().getId()); // data_source_obj_id
 
 			prepStmt.setString(21, extractExtension(fileName)); 				//extension
+
+			prepStmt.setString(22, OsAccount.NO_OWNER_ID); // ownerUid
+			prepStmt.setNull(23, java.sql.Types.BIGINT); // osAccountObjId
+
 			connection.executeUpdate(prepStmt);
 
 			/*
@@ -7272,7 +8489,9 @@ public LayoutFile addLayoutFile(String fileName,
 					null, null,
 					FileKnown.UNKNOWN,
 					parentPath,
-					null);
+					null,
+					OsAccount.NO_OWNER_ID,
+					OsAccount.NO_ACCOUNT);
 
 			transaction.commit();
 			transaction = null;
@@ -7293,15 +8512,35 @@ public LayoutFile addLayoutFile(String fileName,
 			}
 		}
 	}
-
+	
 	/**
-	 * Given an object id, works up the tree of ancestors to the data source for
-	 * the object and gets the object id of the data source. The trivial case
-	 * where the input object id is for a source is handled.
-	 *
+	 * Given a Content object, return its data source object ID.
+	 * For AbstractFiles, this simply returns the data source ID field.
+	 * 
 	 * @param connection A case database connection.
-	 * @param objectId   An object id.
-	 *
+	 * @param content    The content whose data source object ID is to be looked up.
+	 * 
+	 * @return The object ID of the content's data source.
+	 */
+	private long getDataSourceObjectId(CaseDbConnection connection, Content content) throws TskCoreException {
+		if (content == null) {
+			throw new TskCoreException("Null Content parameter given");
+		}
+		if (content instanceof AbstractFile) {
+			return ((AbstractFile)content).getDataSourceObjectId();
+		} else {
+			return getDataSourceObjectId(connection, content.getId());
+		}
+	}
+
+	/**
+	 * Given an object id, works up the tree of ancestors to the data source for
+	 * the object and gets the object id of the data source. The trivial case
+	 * where the input object id is for a data source is handled.
+	 *
+	 * @param connection A case database connection.
+	 * @param objectId   An object id.
+	 *
 	 * @return A data source object id.
 	 *
 	 * @throws TskCoreException if there is an error querying the case database.
@@ -7377,10 +8616,8 @@ private void updateFilePath(CaseDbConnection connection, long objId, String path
 	}
 
 	/**
-	 * Find all files in the data source, by name and parent
+	 * Find all files by name and parent
 	 *
-	 * @param dataSource the dataSource (Image, parent-less VirtualDirectory) to
-	 *                   search for the given file name
 	 * @param fileName   Pattern of the name of the file or directory to match
 	 *                   (case insensitive, used in LIKE SQL statement).
 	 * @param parentFile Object for parent file/directory to find children in
@@ -7391,8 +8628,43 @@ private void updateFilePath(CaseDbConnection connection, long objId, String path
 	 *
 	 * @throws org.sleuthkit.datamodel.TskCoreException
 	 */
-	public List<AbstractFile> findFiles(Content dataSource, String fileName, AbstractFile parentFile) throws TskCoreException {
-		return findFiles(dataSource, fileName, parentFile.getName());
+	public List<AbstractFile> findFilesInFolder(String fileName, AbstractFile parentFile) throws TskCoreException {
+		String ext = "";
+		if (!containsLikeWildcard(fileName)) {
+			ext = SleuthkitCase.extractExtension(fileName);	
+		}
+		
+		CaseDbConnection connection = null;
+		ResultSet rs = null;
+		long parentId = parentFile.getId();
+		
+		acquireSingleUserCaseReadLock();
+		try {
+			connection = connections.getConnection();
+			
+			PreparedStatement statement;
+			if (ext.isEmpty()) {
+				statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_PARENT_AND_NAME);
+				statement.clearParameters();
+				statement.setLong(1, parentId);
+				statement.setString(2, fileName);	
+			} else {
+				statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_FILES_BY_EXTENSION_AND_PARENT_AND_NAME);
+				statement.clearParameters();
+				statement.setString(1, ext);
+				statement.setLong(2, parentId);
+				statement.setString(3, fileName);	
+			}
+
+			rs = connection.executeQuery(statement);
+			return resultSetToAbstractFiles(rs, connection);
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting AbstractFile children with name=" + fileName + " for Content parent with ID=" + parentFile.getId(), ex);
+		} finally {
+			closeResultSet(rs);
+			closeConnection(connection);
+			releaseSingleUserCaseReadLock();
+		}
 	}
 
 	/**
@@ -7407,11 +8679,12 @@ public List<AbstractFile> findFiles(Content dataSource, String fileName, Abstrac
 	 * @throws TskCoreException \ref query_database_page
 	 */
 	public long countFilesWhere(String sqlWhereClause) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM tsk_files WHERE " + sqlWhereClause); //NON-NLS
 			rs.next();
@@ -7421,7 +8694,7 @@ public long countFilesWhere(String sqlWhereClause) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -7444,11 +8717,12 @@ public long countFilesWhere(String sqlWhereClause) throws TskCoreException {
 	 * @throws TskCoreException \ref query_database_page
 	 */
 	public List<AbstractFile> findAllFilesWhere(String sqlWhereClause) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_files WHERE " + sqlWhereClause); //NON-NLS
 			return resultSetToAbstractFiles(rs, connection);
@@ -7457,20 +8731,20 @@ public List<AbstractFile> findAllFilesWhere(String sqlWhereClause) throws TskCor
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
-	
+
 	/**
 	 * Find and return list of all (abstract) files matching the specific Where
-	 * clause with the give parentId. You need to know the database schema to 
+	 * clause with the given parentId. You need to know the database schema to
 	 * use this, which is outlined on the
 	 * <a href="http://wiki.sleuthkit.org/index.php?title=SQLite_Database_v3_Schema">wiki</a>.
 	 * You should use enums from org.sleuthkit.datamodel.TskData to make the
 	 * queries easier to maintain and understand.
-	 * 
-	 * @param parentId The parentId 
+	 *
+	 * @param parentId       The object ID of the parent.
 	 * @param sqlWhereClause a SQL where clause appropriate for the desired
 	 *                       files (do not begin the WHERE clause with the word
 	 *                       WHERE!)
@@ -7480,19 +8754,17 @@ public List<AbstractFile> findAllFilesWhere(String sqlWhereClause) throws TskCor
 	 *
 	 * @throws TskCoreException \ref query_database_page
 	 */
-	public List<AbstractFile> findAllFilesInFolderWhere(long parentId, String sqlWhereClause) throws TskCoreException{
-		String queryTemplate =  "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE par_obj_id = %d AND %s";
-		
-		try(CaseDbConnection connection = connections.getConnection()) {
-			acquireSingleUserCaseReadLock();
-			
+	public List<AbstractFile> findAllFilesInFolderWhere(long parentId, String sqlWhereClause) throws TskCoreException {
+		String queryTemplate = "SELECT tsk_files.* FROM tsk_files JOIN tsk_objects ON tsk_objects.obj_id = tsk_files.obj_id WHERE par_obj_id = %d AND %s";
+		acquireSingleUserCaseReadLock();
+		try (CaseDbConnection connection = connections.getConnection()) {
 			String query = String.format(queryTemplate, parentId, sqlWhereClause);
-			try(Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, query)) {
+			try (Statement s = connection.createStatement(); ResultSet rs = connection.executeQuery(s, query)) {
 				return resultSetToAbstractFiles(rs, connection);
-			} catch(SQLException ex) {
+			} catch (SQLException ex) {
 				throw new TskCoreException("SQLException thrown when calling 'SleuthkitCase.findAllFilesInFolderWhere(): " + query, ex);
 			}
-		}finally {
+		} finally {
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -7510,14 +8782,15 @@ public List<AbstractFile> findAllFilesInFolderWhere(long parentId, String sqlWhe
 	 * @throws TskCoreException \ref query_database_page
 	 */
 	public List<Long> findAllFileIdsWhere(String sqlWhereClause) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT obj_id FROM tsk_files WHERE " + sqlWhereClause); //NON-NLS
-			List<Long> ret = new ArrayList<Long>();
+			List<Long> ret = new ArrayList<>();
 			while (rs.next()) {
 				ret.add(rs.getLong("obj_id"));
 			}
@@ -7527,7 +8800,7 @@ public List<Long> findAllFileIdsWhere(String sqlWhereClause) throws TskCoreExcep
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -7575,11 +8848,12 @@ public List<AbstractFile> openFiles(Content dataSource, String filePath) throws
 	 *                          core
 	 */
 	public List<TskFileRange> getFileRanges(long id) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_file_layout WHERE obj_id = " + id + " ORDER BY sequence");
 			List<TskFileRange> ranges = new ArrayList<TskFileRange>();
@@ -7594,7 +8868,7 @@ public List<TskFileRange> getFileRanges(long id) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -7610,11 +8884,12 @@ public List<TskFileRange> getFileRanges(long id) throws TskCoreException {
 	 *                          core
 	 */
 	public Image getImageById(long id) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT tsk_image_info.type, tsk_image_info.ssize, tsk_image_info.tzone, tsk_image_info.size, tsk_image_info.md5, tsk_image_info.sha1, tsk_image_info.sha256, tsk_image_info.display_name, data_source_info.device_id, tsk_image_names.name "
 					+ "FROM tsk_image_info "
@@ -7667,7 +8942,7 @@ public Image getImageById(long id) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -7684,11 +8959,12 @@ public Image getImageById(long id) throws TskCoreException {
 	 *                          core
 	 */
 	VolumeSystem getVolumeSystemById(long id, Content parent) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_vs_info " //NON-NLS
 					+ "where obj_id = " + id); //NON-NLS
@@ -7707,7 +8983,7 @@ VolumeSystem getVolumeSystemById(long id, Content parent) throws TskCoreExceptio
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -7853,11 +9129,12 @@ private FileSystem getFileSystemByIdHelper(long id, Content parent) throws TskCo
 				return fileSystemIdMap.get(id);
 			}
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_fs_info " //NON-NLS
 					+ "where obj_id = " + id); //NON-NLS
@@ -7880,7 +9157,7 @@ private FileSystem getFileSystemByIdHelper(long id, Content parent) throws TskCo
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -7897,11 +9174,12 @@ private FileSystem getFileSystemByIdHelper(long id, Content parent) throws TskCo
 	 *                          core
 	 */
 	Volume getVolumeById(long id, VolumeSystem parent) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_vs_parts " //NON-NLS
 					+ "where obj_id = " + id); //NON-NLS
@@ -7932,7 +9210,7 @@ Volume getVolumeById(long id, VolumeSystem parent) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -7963,11 +9241,12 @@ Volume getVolumeById(long id, long parentId) throws TskCoreException {
 	 *                          core
 	 */
 	Directory getDirectoryById(long id, FileSystem parentFs) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();	
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_files " //NON-NLS
 					+ "WHERE obj_id = " + id);
@@ -7991,7 +9270,7 @@ Directory getDirectoryById(long id, FileSystem parentFs) throws TskCoreException
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -8007,13 +9286,14 @@ Directory getDirectoryById(long id, FileSystem parentFs) throws TskCoreException
 	 */
 	public Collection<FileSystem> getImageFileSystems(Image image) throws TskCoreException {
 		List<FileSystem> fileSystems = new ArrayList<>();
-		CaseDbConnection connection = connections.getConnection();
-
-		acquireSingleUserCaseReadLock();
+		String queryStr = "SELECT * FROM tsk_fs_info WHERE data_source_obj_id = " + image.getId();
+		
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
-		String queryStr = "SELECT * FROM tsk_fs_info WHERE data_source_obj_id = " + image.getId();
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, queryStr); //NON-NLS
 			while (rs.next()) {
@@ -8029,7 +9309,7 @@ public Collection<FileSystem> getImageFileSystems(Image image) throws TskCoreExc
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return fileSystems;
@@ -8315,7 +9595,25 @@ List<Long> getVolumeChildrenIds(Volume vol) throws TskCoreException {
 	 *                          database.
 	 */
 	public Image addImageInfo(long deviceObjId, List<String> imageFilePaths, String timeZone) throws TskCoreException {
-		long imageId = this.caseHandle.addImageInfo(deviceObjId, imageFilePaths, timeZone, this);
+		return addImageInfo(deviceObjId, imageFilePaths, timeZone, null);
+	}
+
+	/**
+	 * Adds an image to the case database.
+	 *
+	 * @param deviceObjId    The object id of the device associated with the
+	 *                       image.
+	 * @param imageFilePaths The image file paths.
+	 * @param timeZone       The time zone for the image.
+	 * @param host           The host for this image.
+	 *
+	 * @return An Image object.
+	 *
+	 * @throws TskCoreException if there is an error adding the image to case
+	 *                          database.
+	 */
+	public Image addImageInfo(long deviceObjId, List<String> imageFilePaths, String timeZone, Host host) throws TskCoreException {
+		long imageId = this.caseHandle.addImageInfo(deviceObjId, imageFilePaths, timeZone, host, this);
 		return getImageById(imageId);
 	}
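And a sketch of the host-aware addImageInfo() overload; the device object id is assumed to have been obtained earlier, and the image path is hypothetical:

import java.util.Arrays;
import org.sleuthkit.datamodel.*;

class AddImageInfoSketch {
	static Image add(SleuthkitCase caseDb, long deviceObjId, Host host) throws TskCoreException {
		return caseDb.addImageInfo(deviceObjId,
				Arrays.asList("/evidence/disk.E01"), // image file path(s)
				"America/Chicago", host);            // host may be null
	}
}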
 
@@ -8329,14 +9627,15 @@ public Image addImageInfo(long deviceObjId, List<String> imageFilePaths, String
 	 *                          core
 	 */
 	public Map<Long, List<String>> getImagePaths() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s1 = null;
 		ResultSet rs1 = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s1 = connection.createStatement();
-			rs1 = connection.executeQuery(s1, "SELECT tsk_image_info.obj_id, tsk_image_names.name FROM tsk_image_info " +
-				"LEFT JOIN tsk_image_names ON tsk_image_info.obj_id = tsk_image_names.obj_id"); //NON-NLS
+			rs1 = connection.executeQuery(s1, "SELECT tsk_image_info.obj_id, tsk_image_names.name FROM tsk_image_info "
+					+ "LEFT JOIN tsk_image_names ON tsk_image_info.obj_id = tsk_image_names.obj_id"); //NON-NLS
 			Map<Long, List<String>> imgPaths = new LinkedHashMap<Long, List<String>>();
 			while (rs1.next()) {
 				long obj_id = rs1.getLong("obj_id"); //NON-NLS
@@ -8352,7 +9651,7 @@ public Map<Long, List<String>> getImagePaths() throws TskCoreException {
 					if (name != null) {
 						imagePaths.add(name);
 					}
-				}				
+				}
 			}
 			return imgPaths;
 		} catch (SQLException ex) {
@@ -8360,7 +9659,7 @@ public Map<Long, List<String>> getImagePaths() throws TskCoreException {
 		} finally {
 			closeResultSet(rs1);
 			closeStatement(s1);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -8368,16 +9667,16 @@ public Map<Long, List<String>> getImagePaths() throws TskCoreException {
 	/**
 	 * Returns a list of fully qualified file paths based on an image object ID.
 	 *
-	 * @param objectId The object id of the data source.
+	 * @param objectId   The object id of the data source.
+	 * @param connection Database connection to use.
 	 *
 	 * @return List of file paths.
 	 *
 	 * @throws TskCoreException Thrown if a critical error occurred within tsk
 	 *                          core
 	 */
-	private List<String> getImagePathsById(long objectId) throws TskCoreException {
-		List<String> imagePaths = new ArrayList<String>();
-		CaseDbConnection connection = connections.getConnection();
+	private List<String> getImagePathsById(long objectId, CaseDbConnection connection) throws TskCoreException {
+		List<String> imagePaths = new ArrayList<>();
 		acquireSingleUserCaseReadLock();
 		Statement statement = null;
 		ResultSet resultSet = null;
@@ -8392,7 +9691,6 @@ private List<String> getImagePathsById(long objectId) throws TskCoreException {
 		} finally {
 			closeResultSet(resultSet);
 			closeStatement(statement);
-			connection.close();
 			releaseSingleUserCaseReadLock();
 		}
 
@@ -8406,11 +9704,12 @@ private List<String> getImagePathsById(long objectId) throws TskCoreException {
 	 * @throws TskCoreException
 	 */
 	public List<Image> getImages() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT obj_id FROM tsk_image_info"); //NON-NLS
 			Collection<Long> imageIDs = new ArrayList<Long>();
@@ -8427,7 +9726,7 @@ public List<Image> getImages() throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -8443,10 +9742,11 @@ public List<Image> getImages() throws TskCoreException {
 	 *                          within tsk core and the update fails
 	 */
 	public void setImagePaths(long obj_id, List<String> paths) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		acquireSingleUserCaseWriteLock();
-		PreparedStatement statement = null;
+		PreparedStatement statement;
 		try {
+			connection = connections.getConnection();
 			connection.beginTransaction();
 			statement = connection.getPreparedStatement(PREPARED_STATEMENT.DELETE_IMAGE_NAME);
 			statement.clearParameters();
@@ -8462,10 +9762,10 @@ public void setImagePaths(long obj_id, List<String> paths) throws TskCoreExcepti
 			}
 			connection.commitTransaction();
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			rollbackTransaction(connection);
 			throw new TskCoreException("Error updating image paths.", ex);
 		} finally {
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -8482,10 +9782,27 @@ public void setImagePaths(long obj_id, List<String> paths) throws TskCoreExcepti
 	 *                          within tsk core and the update fails
 	 */
 	void deleteDataSource(long dataSourceObjectId) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		Statement statement = null;
+		
+		// Check if this data source is the only one associated with its host. If so,
+		// we will delete the host and other associated data.
+		// Note that the cascading deletes were only added in schema 9.1, so we
+		// would get an error trying to delete a host from older cases.
+		Host hostToDelete = null;
+		VersionNumber version = getDBSchemaCreationVersion();
+		int major = version.getMajor();
+		int minor = version.getMinor();
+		if (major > 9 || (major == 9 && minor >= 1)) {
+			hostToDelete = getHostManager().getHostByDataSource(dataSourceObjectId);
+			if (getHostManager().getDataSourcesForHost(hostToDelete).size() != 1) {
+				hostToDelete = null;
+			}
+		}
+		
+		CaseDbConnection connection = null;
+		Statement statement;
 		acquireSingleUserCaseWriteLock();
 		try {
+			connection = connections.getConnection();
 			statement = connection.createStatement();
 			connection.beginTransaction();
 			// The following delete(s) uses a foreign key delete with cascade in the DB so that it will delete
@@ -8497,12 +9814,25 @@ void deleteDataSource(long dataSourceObjectId) throws TskCoreException {
 					+ "WHERE account_id NOT IN (SELECT account1_id FROM account_relationships) "
 					+ "AND account_id NOT IN (SELECT account2_id FROM account_relationships))";
 			statement.execute(accountSql);
+			
+			// Now delete any host that was only associated with this data source. This will cascade to delete
+			// realms, os accounts, and os account attributes that were associated with the host.
+			if (hostToDelete != null) {
+				statement.execute("DELETE FROM tsk_hosts WHERE id = " + hostToDelete.getHostId());
+				
+				// Clean up any stray OS Account objects
+				String deleteOsAcctObjectsQuery = "DELETE FROM tsk_objects " +
+					"WHERE type=" + TskData.ObjectType.OS_ACCOUNT.getObjectType() + " " + 
+					"AND obj_id NOT IN (SELECT os_account_obj_id FROM tsk_os_accounts WHERE os_account_obj_id IS NOT NULL)";
+				statement.execute(deleteOsAcctObjectsQuery);
+			}
+			
 			connection.commitTransaction();
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			rollbackTransaction(connection);
 			throw new TskCoreException("Error deleting data source.", ex);
 		} finally {
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
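The version gate above is easy to misread (9.0 must be excluded, 10.0 included), so here is the same predicate pulled out on its own, purely as a hedged illustration (this helper does not exist in the patch):

	private static boolean schemaSupportsHostCascadeDelete(VersionNumber creationVersion) {
		// Cascading deletes for hosts exist from case schema 9.1 onward.
		int major = creationVersion.getMajor();
		int minor = creationVersion.getMinor();
		return major > 9 || (major == 9 && minor >= 1);
	}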
@@ -8532,7 +9862,7 @@ void deleteDataSource(long dataSourceObjectId) throws TskCoreException {
 	 * @throws SQLException Thrown if there is a problem iterating through the
 	 *                      record set.
 	 */
-	private List<AbstractFile> resultSetToAbstractFiles(ResultSet rs, CaseDbConnection connection) throws SQLException {
+	List<AbstractFile> resultSetToAbstractFiles(ResultSet rs, CaseDbConnection connection) throws SQLException {
 		ArrayList<AbstractFile> results = new ArrayList<AbstractFile>();
 		try {
 			while (rs.next()) {
@@ -8562,6 +9892,12 @@ private List<AbstractFile> resultSetToAbstractFiles(ResultSet rs, CaseDbConnecti
 					if (parentPath == null) {
 						parentPath = "/"; //NON-NLS
 					}
+
+					Long osAccountObjId = rs.getLong("os_account_obj_id");
+					if (rs.wasNull()) {
+						osAccountObjId = null;
+					}
+
 					LayoutFile lf = new LayoutFile(this,
 							rs.getLong("obj_id"), //NON-NLS
 							rs.getLong("data_source_obj_id"),
@@ -8571,7 +9907,9 @@ private List<AbstractFile> resultSetToAbstractFiles(ResultSet rs, CaseDbConnecti
 							TSK_FS_NAME_FLAG_ENUM.valueOf(rs.getShort("dir_flags")), rs.getShort("meta_flags"), //NON-NLS
 							rs.getLong("size"), //NON-NLS
 							rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
-							rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), parentPath, rs.getString("mime_type")); //NON-NLS
+							rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), parentPath,
+							rs.getString("mime_type"),
+							rs.getString("owner_uid"), osAccountObjId); //NON-NLS
 					results.add(lf);
 				} else if (type == TSK_DB_FILES_TYPE_ENUM.DERIVED.getFileType()) {
 					final DerivedFile df;
@@ -8606,6 +9944,11 @@ private List<AbstractFile> resultSetToAbstractFiles(ResultSet rs, CaseDbConnecti
 	 * @throws SQLException
 	 */
 	org.sleuthkit.datamodel.File file(ResultSet rs, FileSystem fs) throws SQLException {
+		Long osAccountObjId = rs.getLong("os_account_obj_id");
+		if (rs.wasNull()) {
+			osAccountObjId = null;
+		}
+
 		org.sleuthkit.datamodel.File f = new org.sleuthkit.datamodel.File(this, rs.getLong("obj_id"), //NON-NLS
 				rs.getLong("data_source_obj_id"), rs.getLong("fs_obj_id"), //NON-NLS
 				TskData.TSK_FS_ATTR_TYPE_ENUM.valueOf(rs.getShort("attr_type")), //NON-NLS
@@ -8617,7 +9960,7 @@ org.sleuthkit.datamodel.File file(ResultSet rs, FileSystem fs) throws SQLExcepti
 				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
 				(short) rs.getInt("mode"), rs.getInt("uid"), rs.getInt("gid"), //NON-NLS
 				rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
-				rs.getString("parent_path"), rs.getString("mime_type"), rs.getString("extension")); //NON-NLS
+				rs.getString("parent_path"), rs.getString("mime_type"), rs.getString("extension"), rs.getString("owner_uid"), osAccountObjId, Collections.emptyList()); //NON-NLS
 		f.setFileSystem(fs);
 		return f;
 	}
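The os_account_obj_id column is nullable and ResultSet.getLong() maps SQL NULL to 0, which is why every reader added in this patch follows getLong() with wasNull(). The same idiom as a standalone helper, for illustration only (the patch inlines it instead):

	private static Long readNullableLong(ResultSet rs, String column) throws SQLException {
		long value = rs.getLong(column); // 0 when the column is SQL NULL
		return rs.wasNull() ? null : value;
	}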
@@ -8634,6 +9977,11 @@ org.sleuthkit.datamodel.File file(ResultSet rs, FileSystem fs) throws SQLExcepti
 	 * @throws SQLException thrown if SQL error occurred
 	 */
 	Directory directory(ResultSet rs, FileSystem fs) throws SQLException {
+		Long osAccountObjId = rs.getLong("os_account_obj_id");
+		if (rs.wasNull()) {
+			osAccountObjId = null;
+		}
+
 		Directory dir = new Directory(this, rs.getLong("obj_id"), rs.getLong("data_source_obj_id"), rs.getLong("fs_obj_id"), //NON-NLS
 				TskData.TSK_FS_ATTR_TYPE_ENUM.valueOf(rs.getShort("attr_type")), //NON-NLS
 				rs.getInt("attr_id"), rs.getString("name"), rs.getLong("meta_addr"), rs.getInt("meta_seq"), //NON-NLS
@@ -8644,7 +9992,7 @@ Directory directory(ResultSet rs, FileSystem fs) throws SQLException {
 				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
 				rs.getShort("mode"), rs.getInt("uid"), rs.getInt("gid"), //NON-NLS
 				rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
-				rs.getString("parent_path")); //NON-NLS
+				rs.getString("parent_path"), rs.getString("owner_uid"), osAccountObjId); //NON-NLS
 		dir.setFileSystem(fs);
 		return dir;
 	}
@@ -8781,6 +10129,12 @@ private DerivedFile derivedFile(ResultSet rs, CaseDbConnection connection, long
 		if (parentPath == null) {
 			parentPath = "";
 		}
+
+		Long osAccountObjId = rs.getLong("os_account_obj_id");
+		if (rs.wasNull()) {
+			osAccountObjId = null;
+		}
+
 		final DerivedFile df = new DerivedFile(this, objId, rs.getLong("data_source_obj_id"),
 				rs.getString("name"), //NON-NLS
 				TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
@@ -8790,7 +10144,8 @@ private DerivedFile derivedFile(ResultSet rs, CaseDbConnection connection, long
 				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
 				rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
 				parentPath, localPath, parentId, rs.getString("mime_type"),
-				encodingType, rs.getString("extension"));
+				encodingType, rs.getString("extension"),
+				rs.getString("owner_uid"), osAccountObjId);
 		return df;
 	}
 
@@ -8834,6 +10189,11 @@ private LocalFile localFile(ResultSet rs, CaseDbConnection connection, long pare
 		if (null == parentPath) {
 			parentPath = "";
 		}
+		Long osAccountObjId = rs.getLong("os_account_obj_id");
+		if (rs.wasNull()) {
+			osAccountObjId = null;
+		}
+
 		LocalFile file = new LocalFile(this, objId, rs.getString("name"), //NON-NLS
 				TSK_DB_FILES_TYPE_ENUM.valueOf(rs.getShort("type")), //NON-NLS
 				TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")), //NON-NLS
@@ -8843,7 +10203,8 @@ private LocalFile localFile(ResultSet rs, CaseDbConnection connection, long pare
 				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
 				rs.getString("mime_type"), rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
 				parentId, parentPath, rs.getLong("data_source_obj_id"),
-				localPath, encodingType, rs.getString("extension"));
+				localPath, encodingType, rs.getString("extension"),
+				rs.getString("owner_uid"), osAccountObjId);
 		return file;
 	}
 
@@ -8859,6 +10220,10 @@ private LocalFile localFile(ResultSet rs, CaseDbConnection connection, long pare
 	 * @throws SQLException
 	 */
 	org.sleuthkit.datamodel.SlackFile slackFile(ResultSet rs, FileSystem fs) throws SQLException {
+		Long osAccountObjId = rs.getLong("os_account_obj_id");
+		if (rs.wasNull()) {
+			osAccountObjId = null;
+		}
 		org.sleuthkit.datamodel.SlackFile f = new org.sleuthkit.datamodel.SlackFile(this, rs.getLong("obj_id"), //NON-NLS
 				rs.getLong("data_source_obj_id"), rs.getLong("fs_obj_id"), //NON-NLS
 				TskData.TSK_FS_ATTR_TYPE_ENUM.valueOf(rs.getShort("attr_type")), //NON-NLS
@@ -8870,7 +10235,8 @@ org.sleuthkit.datamodel.SlackFile slackFile(ResultSet rs, FileSystem fs) throws
 				rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"), //NON-NLS
 				(short) rs.getInt("mode"), rs.getInt("uid"), rs.getInt("gid"), //NON-NLS
 				rs.getString("md5"), rs.getString("sha256"), FileKnown.valueOf(rs.getByte("known")), //NON-NLS
-				rs.getString("parent_path"), rs.getString("mime_type"), rs.getString("extension")); //NON-NLS
+				rs.getString("parent_path"), rs.getString("mime_type"), rs.getString("extension"),
+				rs.getString("owner_uid"), osAccountObjId); //NON-NLS
 		f.setFileSystem(fs);
 		return f;
 	}
@@ -8924,6 +10290,10 @@ List<Content> fileChildren(ResultSet rs, CaseDbConnection connection, long paren
 						if (parentPath == null) {
 							parentPath = "";
 						}
+						Long osAccountObjId = rs.getLong("os_account_obj_id");
+						if (rs.wasNull()) {
+							osAccountObjId = null;
+						}
 						final LayoutFile lf = new LayoutFile(this, rs.getLong("obj_id"),
 								rs.getLong("data_source_obj_id"), rs.getString("name"), type,
 								TSK_FS_NAME_TYPE_ENUM.valueOf(rs.getShort("dir_type")),
@@ -8932,7 +10302,8 @@ List<Content> fileChildren(ResultSet rs, CaseDbConnection connection, long paren
 								rs.getLong("size"),
 								rs.getLong("ctime"), rs.getLong("crtime"), rs.getLong("atime"), rs.getLong("mtime"),
 								rs.getString("md5"), rs.getString("sha256"),
-								FileKnown.valueOf(rs.getByte("known")), parentPath, rs.getString("mime_type"));
+								FileKnown.valueOf(rs.getByte("known")), parentPath, rs.getString("mime_type"),
+								rs.getString("owner_uid"), osAccountObjId);
 						children.add(lf);
 						break;
 					}
@@ -9056,9 +10427,10 @@ CaseDbConnection getConnection() throws TskCoreException {
 
 	/**
 	 * Gets the string used to identify this case in the JNI cache.
-	 * 
+	 *
 	 * @return The string for this case
-	 * @throws TskCoreException 
 	 */
 	String getCaseHandleIdentifier() {
 		return caseHandleIdentifier;
@@ -9117,11 +10489,9 @@ public boolean setKnown(AbstractFile file, FileKnown fileKnown) throws TskCoreEx
 		if (currentKnown.compareTo(fileKnown) > 0) {
 			return false;
 		}
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		Statement statement = null;
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = connections.getConnection();
+			Statement statement = connection.createStatement();) {
 			connection.executeUpdate(statement, "UPDATE tsk_files " //NON-NLS
 					+ "SET known='" + fileKnown.getFileKnownValue() + "' " //NON-NLS
 					+ "WHERE obj_id=" + id); //NON-NLS
@@ -9130,8 +10500,6 @@ public boolean setKnown(AbstractFile file, FileKnown fileKnown) throws TskCoreEx
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting Known status.", ex);
 		} finally {
-			closeStatement(statement);
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 		return true;
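setKnown() is the first of several methods converted to try-with-resources here. The ordering matters: the connection and statement are closed automatically, in reverse declaration order, before the finally block runs, so the single-user case lock is always released last. The bare pattern, as a hedged sketch with placeholder SQL:

	acquireSingleUserCaseWriteLock();
	try (CaseDbConnection connection = connections.getConnection();
			Statement statement = connection.createStatement()) {
		connection.executeUpdate(statement, "UPDATE ..."); // placeholder statement
	} catch (SQLException ex) {
		throw new TskCoreException("Error running update", ex);
	} finally {
		releaseSingleUserCaseWriteLock();
	}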
@@ -9146,10 +10514,8 @@ public boolean setKnown(AbstractFile file, FileKnown fileKnown) throws TskCoreEx
 	 * @throws TskCoreException If there is an error updating the case database.
 	 */
 	void setFileName(String name, long objId) throws TskCoreException {
-
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			PreparedStatement preparedStatement = connection.getPreparedStatement(SleuthkitCase.PREPARED_STATEMENT.UPDATE_FILE_NAME);
 			preparedStatement.clearParameters();
 			preparedStatement.setString(1, name);
@@ -9158,7 +10524,6 @@ void setFileName(String name, long objId) throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error updating while the name for object ID %d to %s", objId, name), ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -9172,10 +10537,8 @@ void setFileName(String name, long objId) throws TskCoreException {
 	 * @throws TskCoreException If there is an error updating the case database.
 	 */
 	void setImageName(String name, long objId) throws TskCoreException {
-
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			PreparedStatement preparedStatement = connection.getPreparedStatement(SleuthkitCase.PREPARED_STATEMENT.UPDATE_IMAGE_NAME);
 			preparedStatement.clearParameters();
 			preparedStatement.setString(1, name);
@@ -9184,7 +10547,37 @@ void setImageName(String name, long objId) throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error updating while the name for object ID %d to %s", objId, name), ex);
 		} finally {
-			connection.close();
+			releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Updates the image's total size and sector size. This function may be used
+	 * to update the sizes after the image was created.
+	 *
+	 * Can only update the sizes if they were not set before. Will throw
+	 * TskCoreException if the values in the db are not 0 prior to this call.
+	 *
+	 * @param image      The image that needs to be updated
+	 * @param totalSize  The total size
+	 * @param sectorSize The sector size
+	 *
+	 * @throws TskCoreException If there is an error updating the case database.
+	 *
+	 */
+	void setImageSizes(Image image, long totalSize, long sectorSize) throws TskCoreException {
+
+		acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = connections.getConnection();) {
+			PreparedStatement preparedStatement = connection.getPreparedStatement(SleuthkitCase.PREPARED_STATEMENT.UPDATE_IMAGE_SIZES);
+			preparedStatement.clearParameters();
+			preparedStatement.setLong(1, totalSize);
+			preparedStatement.setLong(2, sectorSize);
+			preparedStatement.setLong(3, image.getId());
+			connection.executeUpdate(preparedStatement);
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error updating total size to %d and sector size to %d for image (obj_id = %d)", totalSize, sectorSize, image.getId()), ex);
+		} finally {
 			releaseSingleUserCaseWriteLock();
 		}
 	}
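The bind order in setImageSizes() is (1) total size, (2) sector size, (3) object id, so the statement behind PREPARED_STATEMENT.UPDATE_IMAGE_SIZES is presumably shaped as below. The column names are an assumption based on the tsk_image_info schema, not taken from this patch:

	// Hypothetical statement text for UPDATE_IMAGE_SIZES.
	private static final String UPDATE_IMAGE_SIZES_SQL
			= "UPDATE tsk_image_info SET size = ?, ssize = ? WHERE obj_id = ?";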
@@ -9199,20 +10592,52 @@ void setImageName(String name, long objId) throws TskCoreException {
 	 * @throws TskCoreException If there is an error updating the case database.
 	 */
 	public void setFileMIMEType(AbstractFile file, String mimeType) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		Statement statement = null;
-		ResultSet rs = null;
 		acquireSingleUserCaseWriteLock();
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = connections.getConnection();
+				Statement statement = connection.createStatement()) {
 			connection.executeUpdate(statement, String.format("UPDATE tsk_files SET mime_type = '%s' WHERE obj_id = %d", mimeType, file.getId()));
 			file.setMIMEType(mimeType);
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error setting MIME type for file (obj_id = %s)", file.getId()), ex);
 		} finally {
-			closeResultSet(rs);
-			closeStatement(statement);
-			connection.close();
+			releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Sets the unalloc meta flags for the file in the case database, and
+	 * updates the meta flags in the given file object. Also updates the dir flag to
+	 * unalloc.
+	 *
+	 * @param file A file.
+	 *
+	 *
+	 * @throws TskCoreException If there is an error updating the case database.
+	 */
+	public void setFileUnalloc(AbstractFile file) throws TskCoreException {
+
+		// get the flags, reset the ALLOC flag, and set the UNALLOC flag
+		short metaFlag = file.getMetaFlagsAsInt();
+		Set<TSK_FS_META_FLAG_ENUM> metaFlagAsSet = TSK_FS_META_FLAG_ENUM.valuesOf(metaFlag);
+		metaFlagAsSet.remove(TSK_FS_META_FLAG_ENUM.ALLOC);
+		metaFlagAsSet.add(TSK_FS_META_FLAG_ENUM.UNALLOC);
+
+		short newMetaFlags = TSK_FS_META_FLAG_ENUM.toInt(metaFlagAsSet);
+		short newDirFlags = TSK_FS_NAME_FLAG_ENUM.UNALLOC.getValue();
+
+		acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = connections.getConnection();
+			Statement statement = connection.createStatement();) {
+			connection.executeUpdate(statement, String.format("UPDATE tsk_files SET meta_flags = '%d', dir_flags = '%d' WHERE obj_id = %d", newMetaFlags, newDirFlags, file.getId()));
+
+			file.removeMetaFlag(TSK_FS_META_FLAG_ENUM.ALLOC);
+			file.setMetaFlag(TSK_FS_META_FLAG_ENUM.UNALLOC);
+
+			file.setDirFlag(TSK_FS_NAME_FLAG_ENUM.UNALLOC);
+
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Error setting unalloc meta flag for file (obj_id = %s)", file.getId()), ex);
+		} finally {
 			releaseSingleUserCaseWriteLock();
 		}
 	}
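Because setFileUnalloc() is public, module code that has determined a file is actually deleted can flip it without writing SQL. A hedged usage sketch; the wrapper and its arguments are assumptions for illustration:

	// Hypothetical caller, not part of the patch.
	void markAsUnallocated(SleuthkitCase skCase, AbstractFile file) throws TskCoreException {
		// Sets UNALLOC in meta_flags/dir_flags in the DB and mirrors the change on the object.
		skCase.setFileUnalloc(file);
	}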
@@ -9231,9 +10656,8 @@ void setMd5Hash(AbstractFile file, String md5Hash) throws TskCoreException {
 			return;
 		}
 		long id = file.getId();
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_FILE_MD5);
 			statement.clearParameters();
 			statement.setString(1, md5Hash.toLowerCase());
@@ -9243,7 +10667,6 @@ void setMd5Hash(AbstractFile file, String md5Hash) throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting MD5 hash", ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -9262,9 +10685,8 @@ void setMd5ImageHash(Image img, String md5Hash) throws TskCoreException {
 			return;
 		}
 		long id = img.getId();
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_IMAGE_MD5);
 			statement.clearParameters();
 			statement.setString(1, md5Hash.toLowerCase());
@@ -9273,7 +10695,6 @@ void setMd5ImageHash(Image img, String md5Hash) throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting MD5 hash", ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -9290,11 +10711,13 @@ void setMd5ImageHash(Image img, String md5Hash) throws TskCoreException {
 	 */
 	String getMd5ImageHash(Image img) throws TskCoreException {
 		long id = img.getId();
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
 		String hash = "";
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_IMAGE_MD5);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -9307,7 +10730,7 @@ String getMd5ImageHash(Image img) throws TskCoreException {
 			throw new TskCoreException("Error getting MD5 hash", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9326,9 +10749,8 @@ void setSha1ImageHash(Image img, String sha1Hash) throws TskCoreException {
 			return;
 		}
 		long id = img.getId();
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_IMAGE_SHA1);
 			statement.clearParameters();
 			statement.setString(1, sha1Hash.toLowerCase());
@@ -9337,7 +10759,6 @@ void setSha1ImageHash(Image img, String sha1Hash) throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting SHA1 hash", ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -9354,11 +10775,13 @@ void setSha1ImageHash(Image img, String sha1Hash) throws TskCoreException {
 	 */
 	String getSha1ImageHash(Image img) throws TskCoreException {
 		long id = img.getId();
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
 		String hash = "";
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_IMAGE_SHA1);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -9371,7 +10794,7 @@ String getSha1ImageHash(Image img) throws TskCoreException {
 			throw new TskCoreException("Error getting SHA1 hash", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9390,9 +10813,8 @@ void setSha256ImageHash(Image img, String sha256Hash) throws TskCoreException {
 			return;
 		}
 		long id = img.getId();
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_IMAGE_SHA256);
 			statement.clearParameters();
 			statement.setString(1, sha256Hash.toLowerCase());
@@ -9401,7 +10823,6 @@ void setSha256ImageHash(Image img, String sha256Hash) throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting SHA256 hash", ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -9418,11 +10839,13 @@ void setSha256ImageHash(Image img, String sha256Hash) throws TskCoreException {
 	 */
 	String getSha256ImageHash(Image img) throws TskCoreException {
 		long id = img.getId();
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
 		String hash = "";
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_IMAGE_SHA256);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -9435,7 +10858,7 @@ String getSha256ImageHash(Image img) throws TskCoreException {
 			throw new TskCoreException("Error setting SHA256 hash", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9451,9 +10874,8 @@ String getSha256ImageHash(Image img) throws TskCoreException {
 	void setAcquisitionDetails(DataSource datasource, String details) throws TskCoreException {
 
 		long id = datasource.getId();
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_ACQUISITION_DETAILS);
 			statement.clearParameters();
 			statement.setString(1, details);
@@ -9462,22 +10884,50 @@ void setAcquisitionDetails(DataSource datasource, String details) throws TskCore
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting acquisition details", ex);
 		} finally {
-			connection.close();
+			releaseSingleUserCaseWriteLock();
+		}
+	}
+
+	/**
+	 * Sets the acquisition tool details such as its name, version number and
+	 * any settings used during the acquisition.
+	 *
+	 * @param datasource The datasource object
+	 * @param name       The name of the acquisition tool. May be NULL.
+	 * @param version    The acquisition tool version number. May be NULL.
+	 * @param settings   The settings used by the acquisition tool. May be NULL.
+	 *
+	 * @throws TskCoreException Thrown if the database write fails
+	 */
+	void setAcquisitionToolDetails(DataSource datasource, String name, String version, String settings) throws TskCoreException {
+
+		long id = datasource.getId();
+		acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = connections.getConnection();) {
+			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_ACQUISITION_TOOL_SETTINGS);
+			statement.clearParameters();
+			statement.setString(1, settings);
+			statement.setString(2, name);
+			statement.setString(3, version);
+			statement.setLong(4, id);
+			connection.executeUpdate(statement);
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error setting acquisition tool details", ex);
+		} finally {
 			releaseSingleUserCaseWriteLock();
 		}
 	}
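Note the bind order in setAcquisitionToolDetails(): the statement takes (settings, name, version, obj_id) even though the method signature orders the values (name, version, settings). A hedged sketch of the statement text this implies; the column names are assumptions, only the data_source_info table name comes from the patch:

	// Hypothetical statement text for UPDATE_ACQUISITION_TOOL_SETTINGS.
	private static final String UPDATE_ACQUISITION_TOOL_SETTINGS_SQL
			= "UPDATE data_source_info SET acquisition_tool_settings = ?, acquisition_tool_name = ?, "
			+ "acquisition_tool_version = ? WHERE obj_id = ?";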
 
 	/**
 	 * Set the acquisition details in the data_source_info table.
-	 * 
+	 *
 	 * @param dataSourceId The data source ID.
 	 * @param details      The acquisition details.
 	 * @param trans        The current transaction.
-	 * 
-	 * @throws TskCoreException 
+	 *
+	 * @throws TskCoreException
 	 */
 	void setAcquisitionDetails(long dataSourceId, String details, CaseDbTransaction trans) throws TskCoreException {
-		acquireSingleUserCaseWriteLock();
 		try {
 			CaseDbConnection connection = trans.getConnection();
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_ACQUISITION_DETAILS);
@@ -9487,11 +10937,9 @@ void setAcquisitionDetails(long dataSourceId, String details, CaseDbTransaction
 			connection.executeUpdate(statement);
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting acquisition details", ex);
-		} finally {
-			releaseSingleUserCaseWriteLock();
 		}
 	}
-	
+
 	/**
 	 * Get the acquisition details from the data_source_info table
 	 *
@@ -9503,11 +10951,13 @@ void setAcquisitionDetails(long dataSourceId, String details, CaseDbTransaction
 	 */
 	String getAcquisitionDetails(DataSource datasource) throws TskCoreException {
 		long id = datasource.getId();
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
 		String hash = "";
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ACQUISITION_DETAILS);
 			statement.clearParameters();
 			statement.setLong(1, id);
@@ -9520,7 +10970,79 @@ String getAcquisitionDetails(DataSource datasource) throws TskCoreException {
 			throw new TskCoreException("Error setting acquisition details", ex);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
+			releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get a String value from the specified column of the data_source_info table.
+	 *
+	 * @param datasource The datasource
+	 * @param columnName The column from which the data should be returned
+	 *
+	 * @return String value from the column
+	 *
+	 * @throws TskCoreException
+	 */
+	String getDataSourceInfoString(DataSource datasource, String columnName) throws TskCoreException {
+		long id = datasource.getId();
+		CaseDbConnection connection = null;
+		ResultSet rs = null;
+		String returnValue = "";
+		acquireSingleUserCaseReadLock();
+		try {
+			connection = connections.getConnection();
+			
+			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ACQUISITION_TOOL_SETTINGS);
+			statement.clearParameters();
+			statement.setLong(1, id);
+			rs = connection.executeQuery(statement);
+			if (rs.next()) {
+				returnValue = rs.getString(columnName);
+			}
+			return returnValue;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting data_source_info column: " + columnName, ex);
+		} finally {
+			closeResultSet(rs);
+			closeConnection(connection);
+			releaseSingleUserCaseReadLock();
+		}
+	}
+
+	/**
+	 * Get a Long value from the specified column of the data_source_info table.
+	 *
+	 * @param datasource The datasource
+	 * @param columnName The column from which the data should be returned
+	 *
+	 * @return Long value from the column
+	 *
+	 * @throws TskCoreException
+	 */
+	Long getDataSourceInfoLong(DataSource datasource, String columnName) throws TskCoreException {
+		long id = datasource.getId();
+		CaseDbConnection connection = null;
+		ResultSet rs = null;
+		Long returnValue = null;
+		acquireSingleUserCaseReadLock();
+		try {
+			connection = connections.getConnection();
+			
+			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_ACQUISITION_TOOL_SETTINGS);
+			statement.clearParameters();
+			statement.setLong(1, id);
+			rs = connection.executeQuery(statement);
+			if (rs.next()) {
+				returnValue = rs.getLong(columnName);
+			}
+			return returnValue;
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting data_source_info column: " + columnName, ex);
+		} finally {
+			closeResultSet(rs);
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
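getDataSourceInfoString() and getDataSourceInfoLong() both reuse the SELECT_ACQUISITION_TOOL_SETTINGS query and pull a single named column from the returned row. A hedged usage sketch; the wrapper and the column names below are illustrative assumptions, not taken from the patch:

	// Hypothetical helper in the same class; column names are assumed.
	String describeAcquisitionTool(DataSource ds) throws TskCoreException {
		String name = getDataSourceInfoString(ds, "acquisition_tool_name");
		String version = getDataSourceInfoString(ds, "acquisition_tool_version");
		return name + " " + version;
	}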
@@ -9539,19 +11061,15 @@ public void setReviewStatus(BlackboardArtifact artifact, BlackboardArtifact.Revi
 		if (newStatus == null) {
 			return;
 		}
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		Statement statement = null;
-		try {
-			statement = connection.createStatement();
+		try (CaseDbConnection connection = connections.getConnection();
+			Statement statement = connection.createStatement();) {
 			connection.executeUpdate(statement, "UPDATE blackboard_artifacts "
 					+ " SET review_status_id=" + newStatus.getID()
 					+ " WHERE blackboard_artifacts.artifact_id = " + artifact.getArtifactID());
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting review status", ex);
 		} finally {
-			closeStatement(statement);
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -9567,11 +11085,12 @@ public void setReviewStatus(BlackboardArtifact artifact, BlackboardArtifact.Revi
 	 *                          core
 	 */
 	public int countFsContentType(TskData.TSK_FS_META_TYPE_ENUM contentType) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			Short contentShort = contentType.getValue();
 			rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM tsk_files WHERE meta_type = '" + contentShort.toString() + "'"); //NON-NLS
@@ -9585,7 +11104,7 @@ public int countFsContentType(TskData.TSK_FS_META_TYPE_ENUM contentType) throws
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9617,31 +11136,27 @@ public List<AbstractFile> findFilesByMd5(String md5Hash) {
 		if (md5Hash == null) {
 			return Collections.<AbstractFile>emptyList();
 		}
-		CaseDbConnection connection;
-		try {
-			connection = connections.getConnection();
-		} catch (TskCoreException ex) {
-			logger.log(Level.SEVERE, "Error finding files by md5 hash " + md5Hash, ex); //NON-NLS
-			return Collections.<AbstractFile>emptyList();
-		}
-		acquireSingleUserCaseReadLock();
+		
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_files WHERE " //NON-NLS
 					+ " md5 = '" + md5Hash.toLowerCase() + "' " //NON-NLS
 					+ "AND size > 0"); //NON-NLS
 			return resultSetToAbstractFiles(rs, connection);
-		} catch (SQLException ex) {
+		} catch (SQLException | TskCoreException ex) {
 			logger.log(Level.WARNING, "Error querying database.", ex); //NON-NLS
-			return Collections.<AbstractFile>emptyList();
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
+		return Collections.<AbstractFile>emptyList();
 	}
 
 	/**
@@ -9651,18 +11166,14 @@ public List<AbstractFile> findFilesByMd5(String md5Hash) {
 	 * @return true if all files have an MD5 hash
 	 */
 	public boolean allFilesMd5Hashed() {
-		CaseDbConnection connection;
-		try {
-			connection = connections.getConnection();
-		} catch (TskCoreException ex) {
-			logger.log(Level.SEVERE, "Error checking md5 hashing status", ex); //NON-NLS
-			return false;
-		}
 		boolean allFilesAreHashed = false;
-		acquireSingleUserCaseReadLock();
+		
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM tsk_files " //NON-NLS
 					+ "WHERE dir_type = '" + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + "' " //NON-NLS
@@ -9671,12 +11182,12 @@ public boolean allFilesMd5Hashed() {
 			if (rs.next() && rs.getInt("count") == 0) {
 				allFilesAreHashed = true;
 			}
-		} catch (SQLException ex) {
+		} catch (SQLException | TskCoreException ex) {
 			logger.log(Level.WARNING, "Failed to query whether all files have MD5 hashes", ex); //NON-NLS
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return allFilesAreHashed;
@@ -9687,19 +11198,15 @@ public boolean allFilesMd5Hashed() {
 	 *
 	 * @return the number of files with an MD5 hash
 	 */
-	public int countFilesMd5Hashed() {
-		CaseDbConnection connection;
-		try {
-			connection = connections.getConnection();
-		} catch (TskCoreException ex) {
-			logger.log(Level.SEVERE, "Error getting database connection for hashed files count", ex); //NON-NLS
-			return 0;
-		}
+	public int countFilesMd5Hashed() {		
 		int count = 0;
+		
 		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT COUNT(*) AS count FROM tsk_files " //NON-NLS
 					+ "WHERE md5 IS NOT NULL " //NON-NLS
@@ -9707,12 +11214,12 @@ public int countFilesMd5Hashed() {
 			if (rs.next()) {
 				count = rs.getInt("count");
 			}
-		} catch (SQLException ex) {
+		} catch (SQLException | TskCoreException ex) {
 			logger.log(Level.WARNING, "Failed to query for all the files.", ex); //NON-NLS
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return count;
@@ -9728,10 +11235,12 @@ public int countFilesMd5Hashed() {
 	 * @throws TskCoreException
 	 */
 	public List<TagName> getAllTagNames() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT * FROM tag_names
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_TAG_NAMES);
 			resultSet = connection.executeQuery(statement);
@@ -9746,7 +11255,7 @@ public List<TagName> getAllTagNames() throws TskCoreException {
 			throw new TskCoreException("Error selecting rows from tag_names table", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9762,10 +11271,12 @@ public List<TagName> getAllTagNames() throws TskCoreException {
 	 * @throws TskCoreException
 	 */
 	public List<TagName> getTagNamesInUse() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT * FROM tag_names WHERE tag_name_id IN (SELECT tag_name_id from content_tags UNION SELECT tag_name_id FROM blackboard_artifact_tags)
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_TAG_NAMES_IN_USE);
 			resultSet = connection.executeQuery(statement);
@@ -9780,7 +11291,7 @@ public List<TagName> getTagNamesInUse() throws TskCoreException {
 			throw new TskCoreException("Error selecting rows from tag_names table", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9799,17 +11310,18 @@ public List<TagName> getTagNamesInUse() throws TskCoreException {
 	 */
 	public List<TagName> getTagNamesInUse(long dsObjId) throws TskCoreException {
 
-		ArrayList<TagName> tagNames = new ArrayList<TagName>();
+		ArrayList<TagName> tagNames = new ArrayList<>();
 		//	SELECT * FROM tag_names WHERE tag_name_id IN 
 		//	 ( SELECT content_tags.tag_name_id as tag_name_id FROM content_tags as content_tags, tsk_files as tsk_files WHERE content_tags.obj_id = tsk_files.obj_id AND tsk_files.data_source_obj_id =  ? "
 		//     UNION 
 		//     SELECT artifact_tags.tag_name_id as tag_name_id FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts WHERE artifact_tags.artifact_id = arts.artifact_id AND arts.data_source_obj_id = ? )
 		//   )
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
-
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_TAG_NAMES_IN_USE_BY_DATASOURCE);
 			statement.setLong(1, dsObjId);
 			statement.setLong(2, dsObjId);
@@ -9824,7 +11336,7 @@ public List<TagName> getTagNamesInUse(long dsObjId) throws TskCoreException {
 			throw new TskCoreException("Failed to get tag names in use for data source objID : " + dsObjId, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9862,10 +11374,8 @@ public TagName addTagName(String displayName, String description, TagName.HTML_C
 	 * @throws TskCoreException
 	 */
 	public TagName addOrUpdateTagName(String displayName, String description, TagName.HTML_COLOR color, TskData.FileKnown knownStatus) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		ResultSet resultSet = null;
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			PreparedStatement statement;
 			// INSERT INTO tag_names (display_name, description, color, knownStatus) VALUES (?, ?, ?, ?) ON CONFLICT (display_name) DO UPDATE SET description = ?, color = ?, knownStatus = ?
 			statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_OR_UPDATE_TAG_NAME, Statement.RETURN_GENERATED_KEYS);
@@ -9882,16 +11392,13 @@ public TagName addOrUpdateTagName(String displayName, String description, TagNam
 			statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_TAG_NAME_BY_NAME);
 			statement.clearParameters();
 			statement.setString(1, displayName);
-			resultSet = connection.executeQuery(statement);
-			resultSet.next();
-
-			return new TagName(resultSet.getLong("tag_name_id"), displayName, description, color, knownStatus, resultSet.getLong("tag_set_id"), resultSet.getInt("rank"));
-			
+			try (ResultSet resultSet = connection.executeQuery(statement)) {
+				resultSet.next();
+				return new TagName(resultSet.getLong("tag_name_id"), displayName, description, color, knownStatus, resultSet.getLong("tag_set_id"), resultSet.getInt("rank"));
+			}
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error adding row for " + displayName + " tag name to tag_names table", ex);
 		} finally {
-			closeResultSet(resultSet);
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -9921,19 +11428,30 @@ public ContentTag addContentTag(Content content, TagName tagName, String comment
 	 * @throws TskCoreException
 	 */
 	public void deleteContentTag(ContentTag tag) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseWriteLock();
+		CaseDbTransaction trans = beginTransaction();
 		try {
 			// DELETE FROM content_tags WHERE tag_id = ?
-			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.DELETE_CONTENT_TAG);
+			PreparedStatement statement = trans.getConnection().getPreparedStatement(PREPARED_STATEMENT.DELETE_CONTENT_TAG);
 			statement.clearParameters();
 			statement.setLong(1, tag.getId());
-			connection.executeUpdate(statement);
+			trans.getConnection().executeUpdate(statement);
+			
+			// update the aggregate score for the content
+			Long contentId = tag.getContent() != null ? tag.getContent().getId() : null;
+			Long dataSourceId = tag.getContent() != null && tag.getContent().getDataSource() != null 
+					? tag.getContent().getDataSource().getId() 
+					: null;
+			
+			this.getScoringManager().updateAggregateScoreAfterDeletion(contentId, dataSourceId, trans);
+			
+			trans.commit();
+			trans = null;
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error deleting row from content_tags table (id = " + tag.getId() + ")", ex);
 		} finally {
-			connection.close();
-			releaseSingleUserCaseWriteLock();
+			if (trans != null) {
+				trans.rollback();
+			}
 		}
 	}
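deleteContentTag() (and deleteBlackboardArtifactTag() below) now runs inside a CaseDbTransaction so that the tag delete and the aggregate-score update commit or roll back together. The commit-then-null idiom in isolation, as a hedged sketch inside a method that declares throws TskCoreException:

	CaseDbTransaction trans = beginTransaction();
	try {
		// ... do the delete and the score update on trans.getConnection() ...
		trans.commit();
		trans = null; // success: disarm the rollback below
	} finally {
		if (trans != null) {
			trans.rollback(); // only reached when the try block exited early
		}
	}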
 
@@ -9946,10 +11464,12 @@ public void deleteContentTag(ContentTag tag) throws TskCoreException {
 	 * @throws TskCoreException
 	 */
 	public List<ContentTag> getAllContentTags() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name 
 			//	FROM content_tags 
 			//	INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id 
@@ -9970,7 +11490,7 @@ public List<ContentTag> getAllContentTags() throws TskCoreException {
 			throw new TskCoreException("Error selecting rows from content_tags table", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -9989,10 +11509,12 @@ public long getContentTagsCountByTagName(TagName tagName) throws TskCoreExceptio
 		if (tagName.getId() == Tag.ID_NOT_SET) {
 			throw new TskCoreException("TagName object is invalid, id not set");
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT COUNT(*) AS count FROM content_tags WHERE tag_name_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_CONTENT_TAGS_BY_TAG_NAME);
 			statement.clearParameters();
@@ -10007,7 +11529,7 @@ public long getContentTagsCountByTagName(TagName tagName) throws TskCoreExceptio
 			throw new TskCoreException("Error getting content_tags row count for tag name (tag_name_id = " + tagName.getId() + ")", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10033,10 +11555,12 @@ public long getContentTagsCountByTagName(TagName tagName, long dsObjId) throws T
 			throw new TskCoreException("TagName object is invalid, id not set");
 		}
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// "SELECT COUNT(*) AS count FROM content_tags as content_tags, tsk_files as tsk_files WHERE content_tags.obj_id = tsk_files.obj_id"
 			//		+ " AND content_tags.tag_name_id = ? "
 			//		+ " AND tsk_files.data_source_obj_id = ? "
@@ -10055,7 +11579,7 @@ public long getContentTagsCountByTagName(TagName tagName, long dsObjId) throws T
 			throw new TskCoreException("Failed to get content_tags row count for  tag_name_id = " + tagName.getId() + "data source objID : " + dsObjId, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10072,11 +11596,13 @@ public long getContentTagsCountByTagName(TagName tagName, long dsObjId) throws T
 	 */
 	public ContentTag getContentTagByID(long contentTagID) throws TskCoreException {
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
 		ContentTag tag = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name 
 			//	FROM content_tags 
 			//	INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id 
@@ -10100,7 +11626,7 @@ public ContentTag getContentTagByID(long contentTagID) throws TskCoreException {
 			throw new TskCoreException("Error getting content tag with id = " + contentTagID, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return tag;
@@ -10121,10 +11647,12 @@ public List<ContentTag> getContentTagsByTagName(TagName tagName) throws TskCoreE
 		if (tagName.getId() == Tag.ID_NOT_SET) {
 			throw new TskCoreException("TagName object is invalid, id not set");
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tsk_examiners.login_name 
 			//	FROM content_tags 
 			//  LEFT OUTER JOIN tsk_examiners ON content_tags.examiner_id = tsk_examiners.examiner_id 
@@ -10145,7 +11673,7 @@ public List<ContentTag> getContentTagsByTagName(TagName tagName) throws TskCoreE
 			throw new TskCoreException("Error getting content_tags rows (tag_name_id = " + tagName.getId() + ")", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10164,10 +11692,11 @@ public List<ContentTag> getContentTagsByTagName(TagName tagName) throws TskCoreE
 	 */
 	public List<ContentTag> getContentTagsByTagName(TagName tagName, long dsObjId) throws TskCoreException {
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 
 			//	SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name 
 			//	 FROM content_tags as content_tags, tsk_files as tsk_files 
@@ -10192,7 +11721,7 @@ public List<ContentTag> getContentTagsByTagName(TagName tagName, long dsObjId) t
 			throw new TskCoreException("Failed to get content_tags row count for  tag_name_id = " + tagName.getId() + " data source objID : " + dsObjId, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10209,10 +11738,12 @@ public List<ContentTag> getContentTagsByTagName(TagName tagName, long dsObjId) t
 	 * @throws TskCoreException
 	 */
 	public List<ContentTag> getContentTagsByContent(Content content) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT content_tags.tag_id, content_tags.obj_id, content_tags.tag_name_id, content_tags.comment, content_tags.begin_byte_offset, content_tags.end_byte_offset, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name 
 			//	FROM content_tags 
 			//	INNER JOIN tag_names ON content_tags.tag_name_id = tag_names.tag_name_id 
@@ -10236,7 +11767,7 @@ public List<ContentTag> getContentTagsByContent(Content content) throws TskCoreE
 			throw new TskCoreException("Error getting content tags data for content (obj_id = " + content.getId() + ")", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10266,19 +11797,30 @@ public BlackboardArtifactTag addBlackboardArtifactTag(BlackboardArtifact artifac
 	 * representing the row to delete. @throws TskCoreException
 	 */
 	public void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseWriteLock();
+		CaseDbTransaction trans = beginTransaction();
 		try {
 			// DELETE FROM blackboard_artifact_tags WHERE tag_id = ?
-			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.DELETE_ARTIFACT_TAG);
+			PreparedStatement statement = trans.getConnection().getPreparedStatement(PREPARED_STATEMENT.DELETE_ARTIFACT_TAG);
 			statement.clearParameters();
 			statement.setLong(1, tag.getId());
-			connection.executeUpdate(statement);
+			trans.getConnection().executeUpdate(statement);
+			
+			// update the aggregate score for the artifact
+			Long artifactObjId = tag.getArtifact().getId();
+			Long dataSourceId = tag.getContent() != null && tag.getContent().getDataSource() != null 
+					? tag.getContent().getDataSource().getId() 
+					: null;
+			
+			this.getScoringManager().updateAggregateScoreAfterDeletion(artifactObjId, dataSourceId, trans);
+			
+			trans.commit();
+			trans = null;
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error deleting row from blackboard_artifact_tags table (id = " + tag.getId() + ")", ex);
 		} finally {
-			connection.close();
-			releaseSingleUserCaseWriteLock();
+			if (trans != null) {
+				trans.rollback();
+			}
 		}
 	}
 
@@ -10292,10 +11834,12 @@ public void deleteBlackboardArtifactTag(BlackboardArtifactTag tag) throws TskCor
 	 * @throws TskCoreException
 	 */
 	public List<BlackboardArtifactTag> getAllBlackboardArtifactTags() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name
 			//	FROM blackboard_artifact_tags 
 			//	INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id 
@@ -10318,7 +11862,7 @@ public List<BlackboardArtifactTag> getAllBlackboardArtifactTags() throws TskCore
 			throw new TskCoreException("Error selecting rows from blackboard_artifact_tags table", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10337,10 +11881,12 @@ public long getBlackboardArtifactTagsCountByTagName(TagName tagName) throws TskC
 		if (tagName.getId() == Tag.ID_NOT_SET) {
 			throw new TskCoreException("TagName object is invalid, id not set");
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT COUNT(*) AS count FROM blackboard_artifact_tags WHERE tag_name_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.COUNT_ARTIFACTS_BY_TAG_NAME);
 			statement.clearParameters();
@@ -10355,7 +11901,7 @@ public long getBlackboardArtifactTagsCountByTagName(TagName tagName) throws TskC
 			throw new TskCoreException("Error getting blackboard artifact_content_tags row count for tag name (tag_name_id = " + tagName.getId() + ")", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10380,10 +11926,12 @@ public long getBlackboardArtifactTagsCountByTagName(TagName tagName, long dsObjI
 			throw new TskCoreException("TagName object is invalid, id not set");
 		}
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// "SELECT COUNT(*) AS count FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts WHERE artifact_tags.artifact_id = arts.artifact_id"
 			//    + " AND artifact_tags.tag_name_id = ?"
 			//	 + " AND arts.data_source_obj_id =  ? "
@@ -10401,7 +11949,7 @@ public long getBlackboardArtifactTagsCountByTagName(TagName tagName, long dsObjI
 			throw new TskCoreException("Failed to get blackboard_artifact_tags row count for  tag_name_id = " + tagName.getId() + "data source objID : " + dsObjId, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10421,10 +11969,12 @@ public List<BlackboardArtifactTag> getBlackboardArtifactTagsByTagName(TagName ta
 		if (tagName.getId() == Tag.ID_NOT_SET) {
 			throw new TskCoreException("TagName object is invalid, id not set");
 		}
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tsk_examiners.login_name 
 			//	FROM blackboard_artifact_tags 
 			//	LEFT OUTER JOIN tsk_examiners ON blackboard_artifact_tags.examiner_id = tsk_examiners.examiner_id 
@@ -10446,7 +11996,7 @@ public List<BlackboardArtifactTag> getBlackboardArtifactTagsByTagName(TagName ta
 			throw new TskCoreException("Error getting blackboard artifact tags data (tag_name_id = " + tagName.getId() + ")", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10471,10 +12021,12 @@ public List<BlackboardArtifactTag> getBlackboardArtifactTagsByTagName(TagName ta
 			throw new TskCoreException("TagName object is invalid, id not set");
 		}
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			//	SELECT artifact_tags.tag_id, artifact_tags.artifact_id, artifact_tags.tag_name_id, artifact_tags.comment, arts.obj_id, arts.artifact_obj_id, arts.data_source_obj_id, arts.artifact_type_id, arts.review_status_id, tsk_examiners.login_name 
 			//	 FROM blackboard_artifact_tags as artifact_tags, blackboard_artifacts AS arts 
 			//	 LEFT OUTER JOIN tsk_examiners ON artifact_tags.examiner_id = tsk_examiners.examiner_id 
@@ -10499,7 +12051,7 @@ public List<BlackboardArtifactTag> getBlackboardArtifactTagsByTagName(TagName ta
 			throw new TskCoreException("Failed to get blackboard_artifact_tags row count for  tag_name_id = " + tagName.getId() + "data source objID : " + dsObjId, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 
@@ -10518,11 +12070,13 @@ public List<BlackboardArtifactTag> getBlackboardArtifactTagsByTagName(TagName ta
 	 */
 	public BlackboardArtifactTag getBlackboardArtifactTagByID(long artifactTagID) throws TskCoreException {
 
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
 		BlackboardArtifactTag tag = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			//SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name 
 			//	FROM blackboard_artifact_tags 
 			//	INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id  
@@ -10548,7 +12102,7 @@ public BlackboardArtifactTag getBlackboardArtifactTagByID(long artifactTagID) th
 			throw new TskCoreException("Error getting blackboard artifact tag with id = " + artifactTagID, ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 		return tag;
@@ -10567,10 +12121,12 @@ public BlackboardArtifactTag getBlackboardArtifactTagByID(long artifactTagID) th
 	 * @throws TskCoreException
 	 */
 	public List<BlackboardArtifactTag> getBlackboardArtifactTagsByArtifact(BlackboardArtifact artifact) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			//  SELECT blackboard_artifact_tags.tag_id, blackboard_artifact_tags.artifact_id, blackboard_artifact_tags.tag_name_id, blackboard_artifact_tags.comment, tag_names.display_name, tag_names.description, tag_names.color, tag_names.knownStatus, tsk_examiners.login_name 
 			//	FROM blackboard_artifact_tags 
 			//	INNER JOIN tag_names ON blackboard_artifact_tags.tag_name_id = tag_names.tag_name_id 
@@ -10595,7 +12151,7 @@ public List<BlackboardArtifactTag> getBlackboardArtifactTagsByArtifact(Blackboar
 			throw new TskCoreException("Error getting blackboard artifact tags data (artifact_id = " + artifact.getArtifactID() + ")", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10609,9 +12165,8 @@ public List<BlackboardArtifactTag> getBlackboardArtifactTagsByArtifact(Blackboar
 	 * @throws TskCoreException
 	 */
 	public void updateImagePath(String newPath, long objectId) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			// UPDATE tsk_image_names SET name = ? WHERE obj_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.UPDATE_IMAGE_PATH);
 			statement.clearParameters();
@@ -10621,7 +12176,6 @@ public void updateImagePath(String newPath, long objectId) throws TskCoreExcepti
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error updating image path in database for object " + objectId, ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -10695,10 +12249,8 @@ public Report addReport(String localPath, String sourceModuleName, String report
 		}
 
 		// Write the report data to the database.
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		ResultSet resultSet = null;
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			// Insert a row for the report into the tsk_objects table.
 			// INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
 			long parentObjId = 0;
@@ -10720,8 +12272,6 @@ public Report addReport(String localPath, String sourceModuleName, String report
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error adding report " + localPath + " to reports table", ex);
 		} finally {
-			closeResultSet(resultSet);
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -10735,13 +12285,15 @@ public Report addReport(String localPath, String sourceModuleName, String report
 	 * @throws TskCoreException
 	 */
 	public List<Report> getAllReports() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
 		ResultSet parentResultSet = null;
 		PreparedStatement statement = null;
 		Statement parentStatement = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT * FROM reports
 			statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_REPORTS);
 			parentStatement = connection.createStatement();
@@ -10782,7 +12334,7 @@ public List<Report> getAllReports() throws TskCoreException {
 			closeStatement(statement);
 			closeStatement(parentStatement);
 
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -10797,14 +12349,16 @@ public List<Report> getAllReports() throws TskCoreException {
 	 * @throws TskCoreException
 	 */
 	public Report getReportById(long id) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		PreparedStatement statement = null;
 		Statement parentStatement = null;
 		ResultSet resultSet = null;
 		ResultSet parentResultSet = null;
 		Report report = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT * FROM reports WHERE obj_id = ?
 			statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_REPORT_BY_ID);
 			parentStatement = connection.createStatement();
@@ -10838,7 +12392,7 @@ public Report getReportById(long id) throws TskCoreException {
 			closeResultSet(parentResultSet);
 			closeStatement(statement);
 			closeStatement(parentStatement);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 
@@ -10853,17 +12407,20 @@ public Report getReportById(long id) throws TskCoreException {
 	 * @throws TskCoreException
 	 */
 	public void deleteReport(Report report) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			// DELETE FROM reports WHERE reports.obj_id = ?
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.DELETE_REPORT);
 			statement.setLong(1, report.getId());
 			connection.executeUpdate(statement);
+			// DELETE FROM tsk_objects WHERE tsk_objects.obj_id = ?
+			statement = connection.getPreparedStatement(PREPARED_STATEMENT.DELETE_REPORT_TSK_OBJECT);
+			statement.setLong(1, report.getId());
+			statement.setLong(2, TskData.ObjectType.REPORT.getObjectType());
+			connection.executeUpdate(statement);
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error querying reports table", ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -10888,6 +12445,18 @@ static void closeStatement(Statement statement) {
 			}
 		}
 	}
+	
+	static void closeConnection(CaseDbConnection connection) {
+		if (connection != null) {
+			connection.close();
+		}
+	}
+	
+	private static void rollbackTransaction(CaseDbConnection connection) {
+		if (connection != null) {
+			connection.rollbackTransaction();
+		}
+	}
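
The two null-safe helpers above support a pattern applied throughout this patch: the case lock is acquired first, the connection is obtained inside the try block, and the finally clause cleans up with the null-safe helpers so a failed getConnection() still releases the lock. A minimal sketch of the revised shape, written as if it were another SleuthkitCase query method (the method name and query are illustrative only):

```java
public long countReportsExample() throws TskCoreException {
	CaseDbConnection connection = null;
	Statement statement = null;
	ResultSet resultSet = null;
	acquireSingleUserCaseReadLock();
	try {
		connection = connections.getConnection(); // may throw; the lock is still released below
		statement = connection.createStatement();
		resultSet = connection.executeQuery(statement, "SELECT COUNT(*) AS count FROM reports");
		resultSet.next();
		return resultSet.getLong("count");
	} catch (SQLException ex) {
		throw new TskCoreException("Error counting reports", ex);
	} finally {
		closeResultSet(resultSet);
		closeStatement(statement);
		closeConnection(connection); // null-safe, see helper above
		releaseSingleUserCaseReadLock();
	}
}
```
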
 
 	/**
 	 * Sets the end date for the given ingest job
@@ -10898,29 +12467,25 @@ static void closeStatement(Statement statement) {
 	 * @throws TskCoreException If inserting into the database fails
 	 */
 	void setIngestJobEndDateTime(long ingestJobId, long endDateTime) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
+		try (CaseDbConnection connection = connections.getConnection();) {
 			Statement statement = connection.createStatement();
 			statement.executeUpdate("UPDATE ingest_jobs SET end_date_time=" + endDateTime + " WHERE ingest_job_id=" + ingestJobId + ";");
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error updating the end date (ingest_job_id = " + ingestJobId + ".", ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
 
 	void setIngestJobStatus(long ingestJobId, IngestJobStatusType status) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
 		acquireSingleUserCaseWriteLock();
-		try {
-			Statement statement = connection.createStatement();
+		try (CaseDbConnection connection = connections.getConnection();
+			Statement statement = connection.createStatement();) {
 			statement.executeUpdate("UPDATE ingest_jobs SET status_id=" + status.ordinal() + " WHERE ingest_job_id=" + ingestJobId + ";");
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error ingest job status (ingest_job_id = " + ingestJobId + ".", ex);
 		} finally {
-			connection.close();
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -10942,11 +12507,12 @@ void setIngestJobStatus(long ingestJobId, IngestJobStatusType status) throws Tsk
 	 * @throws TskCoreException If adding the job to the database fails.
 	 */
 	public final IngestJobInfo addIngestJob(Content dataSource, String hostName, List<IngestModuleInfo> ingestModules, Date jobStart, Date jobEnd, IngestJobStatusType status, String settingsDir) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		acquireSingleUserCaseWriteLock();
 		ResultSet resultSet = null;
 		Statement statement;
 		try {
+			connection = connections.getConnection();
 			connection.beginTransaction();
 			statement = connection.createStatement();
 			PreparedStatement insertStatement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_INGEST_JOB, Statement.RETURN_GENERATED_KEYS);
@@ -10970,11 +12536,11 @@ public final IngestJobInfo addIngestJob(Content dataSource, String hostName, Lis
 			connection.commitTransaction();
 			return new IngestJobInfo(id, dataSource.getId(), hostName, jobStart, "", ingestModules, this);
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			rollbackTransaction(connection);
 			throw new TskCoreException("Error adding the ingest job.", ex);
 		} finally {
 			closeResultSet(resultSet);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -10993,12 +12559,13 @@ public final IngestJobInfo addIngestJob(Content dataSource, String hostName, Lis
 	 * @throws TskCoreException When the ingest module cannot be added.
 	 */
 	public final IngestModuleInfo addIngestModule(String displayName, String factoryClassName, IngestModuleType type, String version) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
 		Statement statement = null;
 		String uniqueName = factoryClassName + "-" + displayName + "-" + type.toString() + "-" + version;
 		acquireSingleUserCaseWriteLock();
 		try {
+			connection = connections.getConnection();
 			statement = connection.createStatement();
 			resultSet = statement.executeQuery("SELECT * FROM ingest_modules WHERE unique_name = '" + uniqueName + "'");
 			if (!resultSet.next()) {
@@ -11023,21 +12590,22 @@ public final IngestModuleInfo addIngestModule(String displayName, String factory
 		} catch (SQLException ex) {
 			try {
 				closeStatement(statement);
-				statement = connection.createStatement();
-				resultSet = statement.executeQuery("SELECT * FROM ingest_modules WHERE unique_name = '" + uniqueName + "'");
-				if (resultSet.next()) {
-					return new IngestModuleInfo(resultSet.getInt("ingest_module_id"), resultSet.getString("display_name"),
-							uniqueName, IngestModuleType.fromID(resultSet.getInt("type_id")), resultSet.getString("version"));
-				} else {
-					throw new TskCoreException("Couldn't add new module to database.", ex);
+				if (connection != null) {
+					statement = connection.createStatement();
+					resultSet = statement.executeQuery("SELECT * FROM ingest_modules WHERE unique_name = '" + uniqueName + "'");
+					if (resultSet.next()) {
+						return new IngestModuleInfo(resultSet.getInt("ingest_module_id"), resultSet.getString("display_name"),
+								uniqueName, IngestModuleType.fromID(resultSet.getInt("type_id")), resultSet.getString("version"));
+					}
 				}
+				throw new TskCoreException("Couldn't add new module to database.", ex);
 			} catch (SQLException ex1) {
 				throw new TskCoreException("Couldn't add new module to database.", ex1);
 			}
 		} finally {
 			closeResultSet(resultSet);
 			closeStatement(statement);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseWriteLock();
 		}
 	}
@@ -11050,12 +12618,13 @@ public final IngestModuleInfo addIngestModule(String displayName, String factory
 	 * @throws TskCoreException If there is a problem getting the ingest jobs
 	 */
 	public final List<IngestJobInfo> getIngestJobs() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
+		CaseDbConnection connection = null;
 		ResultSet resultSet = null;
 		Statement statement = null;
-		List<IngestJobInfo> ingestJobs = new ArrayList<IngestJobInfo>();
+		List<IngestJobInfo> ingestJobs = new ArrayList<>();
 		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			statement = connection.createStatement();
 			resultSet = statement.executeQuery("SELECT * FROM ingest_jobs");
 			while (resultSet.next()) {
@@ -11070,7 +12639,7 @@ public final List<IngestJobInfo> getIngestJobs() throws TskCoreException {
 		} finally {
 			closeResultSet(resultSet);
 			closeStatement(statement);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -11088,7 +12657,7 @@ public final List<IngestJobInfo> getIngestJobs() throws TskCoreException {
 	private List<IngestModuleInfo> getIngestModules(int ingestJobId, CaseDbConnection connection) throws SQLException {
 		ResultSet resultSet = null;
 		Statement statement = null;
-		List<IngestModuleInfo> ingestModules = new ArrayList<IngestModuleInfo>();
+		List<IngestModuleInfo> ingestModules = new ArrayList<>();
 		acquireSingleUserCaseReadLock();
 		try {
 			statement = connection.createStatement();
@@ -11113,6 +12682,26 @@ private List<IngestModuleInfo> getIngestModules(int ingestJobId, CaseDbConnectio
 		}
 	}
 
+	/**
+	 * Builds an "INSERT OR IGNORE ..." (SQLite) or "INSERT ... ON CONFLICT DO
+	 * NOTHING" (PostgreSQL) insert statement, based on the database type in
+	 * use, from the given base SQL.
+	 *
+	 * @param sql Base insert SQL - "INTO xyz ...."
+	 *
+	 * @return SQL string.
+	 */
+	String getInsertOrIgnoreSQL(String sql) {
+		switch (getDatabaseType()) {
+			case POSTGRESQL:
+				return " INSERT " + sql + " ON CONFLICT DO NOTHING "; //NON-NLS
+			case SQLITE:
+				return " INSERT OR IGNORE " + sql; //NON-NLS
+			default:
+				throw new UnsupportedOperationException("Unsupported DB type: " + getDatabaseType().name());
+		}
+	}
+
 	/**
 	 * Stores a pair of object ID and its type
 	 */
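
To illustrate the getInsertOrIgnoreSQL() helper added above, here is a self-contained sketch that mirrors its switch and shows the SQL it would produce for the two supported back ends; the DbType enum and the tsk_examiners base clause are stand-ins chosen for the example:

```java
enum DbType { POSTGRESQL, SQLITE }

class InsertOrIgnoreDemo {

	// Mirrors the helper in the hunk above, parameterized on the database type.
	static String getInsertOrIgnoreSQL(DbType type, String sql) {
		switch (type) {
			case POSTGRESQL:
				return " INSERT " + sql + " ON CONFLICT DO NOTHING ";
			case SQLITE:
				return " INSERT OR IGNORE " + sql;
			default:
				throw new UnsupportedOperationException("Unsupported DB type: " + type.name());
		}
	}

	public static void main(String[] args) {
		String base = "INTO tsk_examiners (login_name) VALUES (?)";
		// SQLITE:     " INSERT OR IGNORE INTO tsk_examiners (login_name) VALUES (?)"
		// POSTGRESQL: " INSERT INTO tsk_examiners (login_name) VALUES (?) ON CONFLICT DO NOTHING "
		System.out.println(getInsertOrIgnoreSQL(DbType.SQLITE, base));
		System.out.println(getInsertOrIgnoreSQL(DbType.POSTGRESQL, base));
	}
}
```
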
@@ -11158,6 +12747,19 @@ private enum PREPARED_STATEMENT {
 				+ "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
 				+ "WHERE (tsk_objects.par_obj_id = ? AND tsk_files.type = ? ) " //NON-NLS
 				+ "ORDER BY tsk_files.dir_type, LOWER(tsk_files.name)"), //NON-NLS
+		SELECT_FILES_BY_PARENT_AND_NAME("SELECT tsk_files.* " //NON-NLS
+				+ "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
+				+ "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
+				+ "WHERE (tsk_objects.par_obj_id = ? AND " //NON-NLS
+				+ "LOWER(tsk_files.name) LIKE LOWER(?) AND LOWER(tsk_files.name) NOT LIKE LOWER('%journal%')) "//NON-NLS
+				+ "ORDER BY tsk_files.dir_type, LOWER(tsk_files.name)"), //NON-NLS
+		SELECT_FILES_BY_EXTENSION_AND_PARENT_AND_NAME("SELECT tsk_files.* " //NON-NLS
+				+ "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
+				+ "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
+				+ "WHERE tsk_files.extension = ? AND "
+				+ "(tsk_objects.par_obj_id = ? AND " //NON-NLS
+				+ "LOWER(tsk_files.name) LIKE LOWER(?) AND LOWER(tsk_files.name) NOT LIKE LOWER('%journal%')) "//NON-NLS
+				+ "ORDER BY tsk_files.dir_type, LOWER(tsk_files.name)"), //NON-NLS
 		SELECT_FILE_IDS_BY_PARENT("SELECT tsk_files.obj_id AS obj_id " //NON-NLS
 				+ "FROM tsk_objects INNER JOIN tsk_files " //NON-NLS
 				+ "ON tsk_objects.obj_id=tsk_files.obj_id " //NON-NLS
@@ -11169,11 +12771,14 @@ private enum PREPARED_STATEMENT {
 				+ "AND tsk_files.type = ? )"), //NON-NLS
 		SELECT_FILE_BY_ID("SELECT * FROM tsk_files WHERE obj_id = ? LIMIT 1"), //NON-NLS
 		SELECT_ARTIFACT_BY_ARTIFACT_OBJ_ID("SELECT * FROM blackboard_artifacts WHERE artifact_obj_id = ? LIMIT 1"),
+		SELECT_ARTIFACT_TYPE_BY_ARTIFACT_OBJ_ID("SELECT artifact_type_id FROM blackboard_artifacts WHERE artifact_obj_id = ? LIMIT 1"),
 		SELECT_ARTIFACT_BY_ARTIFACT_ID("SELECT * FROM blackboard_artifacts WHERE artifact_id = ? LIMIT 1"),
 		INSERT_ARTIFACT("INSERT INTO blackboard_artifacts (artifact_id, obj_id, artifact_obj_id, data_source_obj_id, artifact_type_id, review_status_id) " //NON-NLS
 				+ "VALUES (?, ?, ?, ?, ?," + BlackboardArtifact.ReviewStatus.UNDECIDED.getID() + ")"), //NON-NLS
 		POSTGRESQL_INSERT_ARTIFACT("INSERT INTO blackboard_artifacts (artifact_id, obj_id, artifact_obj_id, data_source_obj_id, artifact_type_id, review_status_id) " //NON-NLS
 				+ "VALUES (DEFAULT, ?, ?, ?, ?," + BlackboardArtifact.ReviewStatus.UNDECIDED.getID() + ")"), //NON-NLS
+		INSERT_ANALYSIS_RESULT("INSERT INTO tsk_analysis_results (artifact_obj_id, conclusion, significance, priority, configuration, justification) " //NON-NLS
+				+ "VALUES (?, ?, ?, ?, ?, ?)"), //NON-NLS
 		INSERT_STRING_ATTRIBUTE("INSERT INTO blackboard_attributes (artifact_id, artifact_type_id, source, context, attribute_type_id, value_type, value_text) " //NON-NLS
 				+ "VALUES (?,?,?,?,?,?,?)"), //NON-NLS
 		INSERT_BYTE_ATTRIBUTE("INSERT INTO blackboard_attributes (artifact_id, artifact_type_id, source, context, attribute_type_id, value_type, value_byte) " //NON-NLS
@@ -11184,8 +12789,12 @@ private enum PREPARED_STATEMENT {
 				+ "VALUES (?,?,?,?,?,?,?)"), //NON-NLS
 		INSERT_DOUBLE_ATTRIBUTE("INSERT INTO blackboard_attributes (artifact_id, artifact_type_id, source, context, attribute_type_id, value_type, value_double) " //NON-NLS
 				+ "VALUES (?,?,?,?,?,?,?)"), //NON-NLS
+		INSERT_FILE_ATTRIBUTE("INSERT INTO tsk_file_attributes (obj_id, attribute_type_id, value_type, value_byte, value_text, value_int32, value_int64, value_double) " //NON-NLS
+				+ "VALUES (?,?,?,?,?,?,?,?)"), //NON-NLS
 		SELECT_FILES_BY_DATA_SOURCE_AND_NAME("SELECT * FROM tsk_files WHERE LOWER(name) LIKE LOWER(?) AND LOWER(name) NOT LIKE LOWER('%journal%') AND data_source_obj_id = ?"), //NON-NLS
+		SELECT_FILES_BY_EXTENSION_AND_DATA_SOURCE_AND_NAME("SELECT * FROM tsk_files WHERE extension = ? AND LOWER(name) LIKE LOWER(?) AND LOWER(name) NOT LIKE LOWER('%journal%') AND data_source_obj_id = ?"), //NON-NLS
 		SELECT_FILES_BY_DATA_SOURCE_AND_PARENT_PATH_AND_NAME("SELECT * FROM tsk_files WHERE LOWER(name) LIKE LOWER(?) AND LOWER(name) NOT LIKE LOWER('%journal%') AND LOWER(parent_path) LIKE LOWER(?) AND data_source_obj_id = ?"), //NON-NLS
+		SELECT_FILES_BY_EXTENSION_AND_DATA_SOURCE_AND_PARENT_PATH_AND_NAME("SELECT * FROM tsk_files WHERE extension = ? AND LOWER(name) LIKE LOWER(?) AND LOWER(name) NOT LIKE LOWER('%journal%') AND LOWER(parent_path) LIKE LOWER(?) AND data_source_obj_id = ?"), //NON-NLS
 		UPDATE_FILE_MD5("UPDATE tsk_files SET md5 = ? WHERE obj_id = ?"), //NON-NLS
 		UPDATE_IMAGE_MD5("UPDATE tsk_image_info SET md5 = ? WHERE obj_id = ?"), //NON-NLS
 		UPDATE_IMAGE_SHA1("UPDATE tsk_image_info SET sha1 = ? WHERE obj_id = ?"), //NON-NLS
@@ -11194,7 +12803,9 @@ private enum PREPARED_STATEMENT {
 		SELECT_IMAGE_SHA1("SELECT sha1 FROM tsk_image_info WHERE obj_id = ?"), //NON-NLS
 		SELECT_IMAGE_SHA256("SELECT sha256 FROM tsk_image_info WHERE obj_id = ?"), //NON-NLS
 		UPDATE_ACQUISITION_DETAILS("UPDATE data_source_info SET acquisition_details = ? WHERE obj_id = ?"), //NON-NLS
+		UPDATE_ACQUISITION_TOOL_SETTINGS("UPDATE data_source_info SET acquisition_tool_settings = ?, acquisition_tool_name = ?, acquisition_tool_version = ? WHERE obj_id = ?"), //NON-NLS
 		SELECT_ACQUISITION_DETAILS("SELECT acquisition_details FROM data_source_info WHERE obj_id = ?"), //NON-NLS
+		SELECT_ACQUISITION_TOOL_SETTINGS("SELECT acquisition_tool_settings, acquisition_tool_name, acquisition_tool_version, added_date_time FROM data_source_info WHERE obj_id = ?"), //NON-NLS
 		SELECT_LOCAL_PATH_FOR_FILE("SELECT path FROM tsk_files_path WHERE obj_id = ?"), //NON-NLS
 		SELECT_ENCODING_FOR_FILE("SELECT encoding_type FROM tsk_files_path WHERE obj_id = ?"), // NON-NLS
 		SELECT_LOCAL_PATH_AND_ENCODING_FOR_FILE("SELECT path, encoding_type FROM tsk_files_path WHERE obj_id = ?"), // NON_NLS
@@ -11204,10 +12815,10 @@ private enum PREPARED_STATEMENT {
 		SELECT_FILE_DERIVATION_METHOD("SELECT tool_name, tool_version, other FROM tsk_files_derived_method WHERE derived_id = ?"), //NON-NLS
 		SELECT_MAX_OBJECT_ID("SELECT MAX(obj_id) AS max_obj_id FROM tsk_objects"), //NON-NLS
 		INSERT_OBJECT("INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)"), //NON-NLS
-		INSERT_FILE("INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, known, mime_type, parent_path, data_source_obj_id,extension) " //NON-NLS
-				+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"), //NON-NLS
-		INSERT_FILE_SYSTEM_FILE("INSERT INTO tsk_files(obj_id, fs_obj_id, data_source_obj_id, attr_type, attr_id, name, meta_addr, meta_seq, type, has_path, dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, atime, mtime, parent_path, extension)"
-				+ " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"), // NON-NLS
+		INSERT_FILE("INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, known, mime_type, parent_path, data_source_obj_id, extension, owner_uid, os_account_obj_id  ) " //NON-NLS
+				+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"), //NON-NLS
+		INSERT_FILE_SYSTEM_FILE("INSERT INTO tsk_files(obj_id, fs_obj_id, data_source_obj_id, attr_type, attr_id, name, meta_addr, meta_seq, type, has_path, dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, sha256, mime_type, parent_path, extension, owner_uid, os_account_obj_id )"
+				+ " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"), // NON-NLS
 		UPDATE_DERIVED_FILE("UPDATE tsk_files SET type = ?, dir_type = ?, meta_type = ?, dir_flags = ?,  meta_flags = ?, size= ?, ctime= ?, crtime= ?, atime= ?, mtime= ?, mime_type = ?  "
 				+ "WHERE obj_id = ?"), //NON-NLS
 		INSERT_LAYOUT_FILE("INSERT INTO tsk_file_layout (obj_id, byte_start, byte_len, sequence) " //NON-NLS
@@ -11301,6 +12912,7 @@ private enum PREPARED_STATEMENT {
 		SELECT_REPORT_BY_ID("SELECT * FROM reports WHERE obj_id = ?"), //NON-NLS
 		INSERT_REPORT("INSERT INTO reports (obj_id, path, crtime, src_module_name, report_name) VALUES (?, ?, ?, ?, ?)"), //NON-NLS
 		DELETE_REPORT("DELETE FROM reports WHERE reports.obj_id = ?"), //NON-NLS
+		DELETE_REPORT_TSK_OBJECT("DELETE FROM tsk_objects where tsk_objects.obj_id = ? and tsk_objects.type = ?"),
 		INSERT_INGEST_JOB("INSERT INTO ingest_jobs (obj_id, host_name, start_date_time, end_date_time, status_id, settings_dir) VALUES (?, ?, ?, ?, ?, ?)"), //NON-NLS
 		INSERT_INGEST_MODULE("INSERT INTO ingest_modules (display_name, unique_name, type_id, version) VALUES(?, ?, ?, ?)"), //NON-NLS
 		SELECT_ATTR_BY_VALUE_BYTE("SELECT source FROM blackboard_attributes WHERE artifact_id = ? AND attribute_type_id = ? AND value_type = 4 AND value_byte = ?"), //NON-NLS
@@ -11317,11 +12929,12 @@ private enum PREPARED_STATEMENT {
 		INSERT_EXAMINER_SQLITE("INSERT OR IGNORE INTO tsk_examiners (login_name) VALUES (?)"),
 		UPDATE_FILE_NAME("UPDATE tsk_files SET name = ? WHERE obj_id = ?"),
 		UPDATE_IMAGE_NAME("UPDATE tsk_image_info SET display_name = ? WHERE obj_id = ?"),
+		UPDATE_IMAGE_SIZES("UPDATE tsk_image_info SET size = ?, ssize = ? WHERE obj_id = ?"),
 		DELETE_IMAGE_NAME("DELETE FROM tsk_image_names WHERE obj_id = ?"),
 		INSERT_IMAGE_NAME("INSERT INTO tsk_image_names (obj_id, name, sequence) VALUES (?, ?, ?)"),
 		INSERT_IMAGE_INFO("INSERT INTO tsk_image_info (obj_id, type, ssize, tzone, size, md5, sha1, sha256, display_name)"
 				+ " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)"),
-		INSERT_DATA_SOURCE_INFO("INSERT INTO data_source_info (obj_id, device_id, time_zone) VALUES (?, ?, ?)"),
+		INSERT_DATA_SOURCE_INFO("INSERT INTO data_source_info (obj_id, device_id, time_zone, added_date_time, host_id) VALUES (?, ?, ?, ?, ?)"),
 		INSERT_VS_INFO("INSERT INTO tsk_vs_info (obj_id, vs_type, img_offset, block_size) VALUES (?, ?, ?, ?)"),
 		INSERT_VS_PART_SQLITE("INSERT INTO tsk_vs_parts (obj_id, addr, start, length, desc, flags) VALUES (?, ?, ?, ?, ?, ?)"),
 		INSERT_VS_PART_POSTGRESQL("INSERT INTO tsk_vs_parts (obj_id, addr, start, length, descr, flags) VALUES (?, ?, ?, ?, ?, ?)"),
@@ -11420,6 +13033,13 @@ private final class SQLiteConnections extends ConnectionPool {
 
 		@Override
 		public CaseDbConnection getPooledConnection() throws SQLException {
+			// If the requesting thread already has an open transaction, the new connection may get SQLITE_BUSY errors. 
+			if (CaseDbTransaction.hasOpenTransaction(Thread.currentThread().getId())) {
+				// Temporarily filter out Image Gallery threads
+				if (!Thread.currentThread().getName().contains("ImageGallery")) {
+					logger.log(Level.WARNING, String.format("Thread %s (ID = %d) already has an open transaction.  New connection may encounter SQLITE_BUSY error. ", Thread.currentThread().getName(), Thread.currentThread().getId()), new Throwable());
+				}
+			}
 			return new SQLiteConnection(getPooledDataSource().getConnection());
 		}
 	}
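
The check added to getPooledConnection() above warns about a specific anti-pattern: a thread that already holds an open CaseDbTransaction asks for a second pooled connection, which on SQLite can stall or fail with SQLITE_BUSY because the transaction's connection still holds the database lock. A hedged illustration of that situation from the caller's side (the wrapper method itself is hypothetical; beginTransaction(), getAllReports(), commit() and rollback() are the APIs shown elsewhere in this patch):

```java
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
import org.sleuthkit.datamodel.TskCoreException;

class OpenTransactionDemo {

	static void riskyPattern(SleuthkitCase caseDb) throws TskCoreException {
		CaseDbTransaction trans = caseDb.beginTransaction();
		try {
			// ... work done on the transaction's own connection ...

			// This call checks out a second pooled connection on the same thread.
			// On SQLite the transaction's connection may still hold the write lock,
			// so the query can hit SQLITE_BUSY; the new check above logs a warning.
			caseDb.getAllReports();

			trans.commit();
			trans = null;
		} finally {
			if (trans != null) {
				trans.rollback();
			}
		}
	}
}
```
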
@@ -11513,6 +13133,30 @@ public void execute() throws SQLException {
 			}
 		}
 
+		/**
+		 * Obtains a write lock on the tsk_aggregate_score table. Only PostgreSQL is
+		 * supported.
+		 *
+		 * NOTE: We run into deadlock risks when we start to lock multiple
+		 * tables. If that need arises, consider changing to opportunistic
+		 * locking and single-step transactions.
+		 */
+		private class AggregateScoreTablePostgreSQLWriteLock implements DbCommand {
+
+			private final Connection connection;
+
+			AggregateScoreTablePostgreSQLWriteLock(Connection connection) {
+				this.connection = connection;
+			}
+
+			@Override
+			public void execute() throws SQLException {
+				PreparedStatement preparedStatement = connection.prepareStatement("LOCK TABLE ONLY tsk_aggregate_score in SHARE ROW EXCLUSIVE MODE");
+				preparedStatement.execute();
+
+			}
+		}
+
 		private class ExecuteQuery implements DbCommand {
 
 			private final Statement statement;
@@ -11676,17 +13320,17 @@ PreparedStatement getPreparedStatement(PREPARED_STATEMENT statementKey, int gene
 			}
 			return statement;
 		}
-		
+
 		/**
-		 * Get a prepared statement for the given input.
-		 * Will cache the prepared statement for this connection.
-		 * 
-		 * @param sqlStatement  The SQL for the prepared statement.
-		 * @param generateKeys  The generate keys enum from Statement.
-		 * 
+		 * Get a prepared statement for the given input. Will cache the prepared
+		 * statement for this connection.
+		 *
+		 * @param sqlStatement The SQL for the prepared statement.
+		 * @param generateKeys The generate keys enum from Statement.
+		 *
 		 * @return The prepared statement
-		 * 
-		 * @throws SQLException 
+		 *
+		 * @throws SQLException
 		 */
 		PreparedStatement getPreparedStatement(String sqlStatement, int generateKeys) throws SQLException {
 			PreparedStatement statement;
@@ -11758,6 +13402,29 @@ void rollbackTransactionWithThrow() throws SQLException {
 			}
 		}
 
+		/**
+		 * Blocks until a write lock can be obtained on the tsk_aggregate_score
+		 * table. Used to ensure only one thread/client is updating the score at
+		 * a time. Can be called multiple times on the same transaction.
+		 *
+		 * @throws SQLException
+		 * @throws TskCoreException
+		 */
+		void getAggregateScoreTableWriteLock() throws SQLException, TskCoreException {
+			switch (getDatabaseType()) {
+				case POSTGRESQL:
+					AggregateScoreTablePostgreSQLWriteLock tableWriteLock = new AggregateScoreTablePostgreSQLWriteLock(connection);
+					executeCommand(tableWriteLock);
+					break;
+				case SQLITE:
+					// We do nothing here because we assume the entire SQLite DB is already locked from
+					// when the analysis results were added/deleted in the same transaction. 
+					break;
+				default:
+					throw new TskCoreException("Unknown DB Type: " + getDatabaseType().name());
+			}
+		}
+
 		ResultSet executeQuery(Statement statement, String query) throws SQLException {
 			ExecuteQuery queryCommand = new ExecuteQuery(statement, query);
 			executeCommand(queryCommand);
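
The AggregateScoreTablePostgreSQLWriteLock command and the getAggregateScoreTableWriteLock() method above serialize writers of tsk_aggregate_score: PostgreSQL takes a SHARE ROW EXCLUSIVE table lock, while SQLite relies on the transaction already holding the database-wide lock. A rough, datamodel-internal sketch of how scoring code could use it inside an existing transaction (the surrounding method and its parameters are hypothetical):

```java
void updateAggregateScoreInTransaction(long objId, SleuthkitCase.CaseDbTransaction trans)
		throws TskCoreException {
	try {
		// PostgreSQL: LOCK TABLE ONLY tsk_aggregate_score IN SHARE ROW EXCLUSIVE MODE.
		// SQLite: no-op, the transaction already holds the database-wide lock.
		trans.getConnection().getAggregateScoreTableWriteLock();

		// ... read the current tsk_aggregate_score row and INSERT/UPDATE it
		//     on trans.getConnection() ...

		// Register the change so commit() later fires an AggregateScoresChangedEvent
		// (see CaseDbTransaction.registerScoreChange() further down in this patch).
	} catch (SQLException ex) {
		throw new TskCoreException("Error updating aggregate score for obj id = " + objId, ex);
	}
}
```
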
@@ -11799,10 +13466,10 @@ void executeUpdate(PreparedStatement statement) throws SQLException {
 		@Override
 		public void close() {
 			try {
-				for (PreparedStatement stmt:preparedStatements.values()) {
+				for (PreparedStatement stmt : preparedStatements.values()) {
 					closeStatement(stmt);
 				}
-				for (PreparedStatement stmt:adHocPreparedStatements.values()) {
+				for (PreparedStatement stmt : adHocPreparedStatements.values()) {
 					closeStatement(stmt);
 				}
 				connection.close();
@@ -11919,27 +13586,46 @@ void executeCommand(DbCommand command) throws SQLException {
 	 * Transaction interface because that sort of flexibility and its associated
 	 * complexity is not needed. Also, TskCoreExceptions are thrown to be
 	 * consistent with the outer SleuthkitCase class.
-	 * 
-	 * This class will automatically acquire the single user case write lock
-	 * and release it when the transaction is closed. Otherwise we risk deadlock 
+	 *
+	 * This class will automatically acquire the single user case write lock and
+	 * release it when the transaction is closed. Otherwise we risk deadlock
 	 * because this transaction can lock up SQLite and make it "busy" and
-	 * another thread may get a write lock to the DB, but not
-	 * be able to do anything because the DB is busy.
+	 * another thread may get a write lock to the DB, but not be able to do
+	 * anything because the DB is busy.
 	 */
 	public static final class CaseDbTransaction {
 
 		private final CaseDbConnection connection;
 		private SleuthkitCase sleuthkitCase;
 
-		private CaseDbTransaction(SleuthkitCase sleuthkitCase, CaseDbConnection connection) throws TskCoreException {
-			this.connection = connection;
+		// A collection of object score changes that occurred as part of this transaction.
+		// When the transaction is committed, events are fired to notify any listeners.
+		// Score changes are stored as a map keyed by objId to prevent duplicates.
+		private Map<Long, ScoreChange> scoreChangeMap = new HashMap<>();
+		private List<Host> hostsAdded = new ArrayList<>();
+		private List<OsAccount> accountsChanged = new ArrayList<>();
+		private List<OsAccount> accountsAdded = new ArrayList<>();
+		private List<Long> deletedOsAccountObjectIds = new ArrayList<>();
+		private List<Long> deletedResultObjectIds = new ArrayList<>();
+
+		private static Set<Long> threadsWithOpenTransaction = new HashSet<>();
+		private static final Object threadsWithOpenTransactionLock = new Object();
+
+		private CaseDbTransaction(SleuthkitCase sleuthkitCase) throws TskCoreException {
 			this.sleuthkitCase = sleuthkitCase;
+
+			sleuthkitCase.acquireSingleUserCaseWriteLock();
+			this.connection = sleuthkitCase.getConnection();
 			try {
-				this.connection.beginTransaction();
+				synchronized (threadsWithOpenTransactionLock) {
+					this.connection.beginTransaction();
+					threadsWithOpenTransaction.add(Thread.currentThread().getId());
+				}
 			} catch (SQLException ex) {
+				sleuthkitCase.releaseSingleUserCaseWriteLock();
 				throw new TskCoreException("Failed to create transaction on case database", ex);
 			}
-			sleuthkitCase.acquireSingleUserCaseWriteLock();
+
 		}
 
 		/**
@@ -11953,6 +13639,81 @@ CaseDbConnection getConnection() {
 			return this.connection;
 		}
 
+		/**
+		 * Saves a score change done as part of the transaction.
+		 *
+		 * @param scoreChange Score change.
+		 */
+		void registerScoreChange(ScoreChange scoreChange) {
+			scoreChangeMap.put(scoreChange.getObjectId(), scoreChange);
+		}
+
+		/**
+		 * Saves a host that has been added as a part of this transaction.
+		 *
+		 * @param host The host.
+		 */
+		void registerAddedHost(Host host) {
+			if (host != null) {
+				this.hostsAdded.add(host);
+			}
+		}
+
+		/**
+		 * Saves an account that has been updated as a part of this transaction.
+		 *
+		 * @param account The account.
+		 */
+		void registerChangedOsAccount(OsAccount account) {
+			if (account != null) {
+				accountsChanged.add(account);
+			}
+		}
+
+		/**
+		 * Saves an account that has been deleted as a part of this transaction.
+		 *
+		 * @param osAccountObjId Object ID of the deleted account.
+		 */
+		void registerDeletedOsAccount(long osAccountObjId) {
+			deletedOsAccountObjectIds.add(osAccountObjId);
+		}
+
+		/**
+		 * Saves an account that has been added as a part of this transaction.
+		 *
+		 * @param account The account.
+		 */
+		void registerAddedOsAccount(OsAccount account) {
+			if (account != null) {
+				accountsAdded.add(account);
+			}
+		}
+
+		/**
+		 * Saves an analysis result that has been deleted as a part of this
+		 * transaction.
+		 *
+		 * @param analysisResultObjId Object ID of the deleted analysis result.
+		 */
+		void registerDeletedAnalysisResult(long analysisResultObjId) {
+			this.deletedResultObjectIds.add(analysisResultObjId);
+		}
+
+		/**
+		 * Check if the given thread has an open transaction.
+		 *
+		 * @param threadId Thread id to check for.
+		 *
+		 * @return True if the given thread has an open transaction, false
+		 *         otherwise.
+		 */
+		private static boolean hasOpenTransaction(long threadId) {
+			synchronized (threadsWithOpenTransactionLock) {
+				return threadsWithOpenTransaction.contains(threadId);
+			}
+		}
+
 		/**
 		 * Commits the transaction on the case database that was begun when this
 		 * object was constructed.
@@ -11966,6 +13727,29 @@ public void commit() throws TskCoreException {
 				throw new TskCoreException("Failed to commit transaction on case database", ex);
 			} finally {
 				close();
+
+				if (!scoreChangeMap.isEmpty()) {
+					Map<Long, List<ScoreChange>> changesByDataSource = scoreChangeMap.values().stream()
+							.collect(Collectors.groupingBy(ScoreChange::getDataSourceObjectId));
+					for (Map.Entry<Long, List<ScoreChange>> entry : changesByDataSource.entrySet()) {
+						sleuthkitCase.fireTSKEvent(new TskEvent.AggregateScoresChangedEvent(entry.getKey(), ImmutableSet.copyOf(entry.getValue())));
+					}
+				}
+				if (!hostsAdded.isEmpty()) {
+					sleuthkitCase.fireTSKEvent(new TskEvent.HostsAddedTskEvent(hostsAdded));
+				}
+				if (!accountsAdded.isEmpty()) {
+					sleuthkitCase.fireTSKEvent(new TskEvent.OsAccountsAddedTskEvent(accountsAdded));
+				}
+				if (!accountsChanged.isEmpty()) {
+					sleuthkitCase.fireTSKEvent(new TskEvent.OsAccountsUpdatedTskEvent(accountsChanged));
+				}
+				if (!deletedOsAccountObjectIds.isEmpty()) {
+					sleuthkitCase.fireTSKEvent(new TskEvent.OsAccountsDeletedTskEvent(deletedOsAccountObjectIds));
+				}
+				if (!deletedResultObjectIds.isEmpty()) {
+					sleuthkitCase.fireTSKEvent(new TskEvent.AnalysisResultsDeletedTskEvent(deletedResultObjectIds));
+				}
 			}
 		}
 
@@ -11992,6 +13776,9 @@ public void rollback() throws TskCoreException {
 		void close() {
 			this.connection.close();
 			sleuthkitCase.releaseSingleUserCaseWriteLock();
+			synchronized (threadsWithOpenTransactionLock) {
+				threadsWithOpenTransaction.remove(Thread.currentThread().getId());
+			}
 		}
 	}
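
Putting the pieces of CaseDbTransaction together, a hedged caller-side sketch of the lifecycle it implements: beginTransaction() takes the single-user write lock and opens the transaction, commit() ends it and fires any registered TSK events, and a rollback from finally covers the failure path. The wrapper method is illustrative only; methods that accept a CaseDbTransaction, such as addLocalFile(..., parent, trans), would do their work on the transaction's connection:

```java
void doWorkInOneTransaction(SleuthkitCase caseDb) throws TskCoreException {
	SleuthkitCase.CaseDbTransaction trans = caseDb.beginTransaction(); // acquires the case write lock
	try {
		// ... call case-database methods that take a CaseDbTransaction, e.g.
		//     addLocalFile(..., parentDir, trans), so they share this connection ...

		trans.commit(); // ends the transaction, fires registered events, releases the lock
		trans = null;   // mark success so the finally block does not roll back
	} finally {
		if (trans != null) {
			trans.rollback(); // also releases the lock via close()
		}
	}
}
```
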
 
@@ -12019,18 +13806,17 @@ private CaseDbQuery(String query, boolean allowWriteQuery) throws TskCoreExcepti
 					throw new TskCoreException("Unsupported query: Only SELECT queries are supported.");
 				}
 			}
+			
+			SleuthkitCase.this.acquireSingleUserCaseReadLock();
 			try {
 				connection = connections.getConnection();
-			} catch (TskCoreException ex) {
-				throw new TskCoreException("Error getting connection for query: ", ex);
-			}
-
-			try {
-				SleuthkitCase.this.acquireSingleUserCaseReadLock();
 				resultSet = connection.executeQuery(connection.createStatement(), query);
 			} catch (SQLException ex) {
 				SleuthkitCase.this.releaseSingleUserCaseReadLock();
 				throw new TskCoreException("Error executing query: ", ex);
+			} catch (TskCoreException ex) {
+				SleuthkitCase.this.releaseSingleUserCaseReadLock();
+				throw ex;
 			}
 		}
 
@@ -12053,7 +13839,7 @@ public void close() throws TskCoreException {
 					}
 					resultSet.close();
 				}
-				connection.close();
+				closeConnection(connection);
 			} catch (SQLException ex) {
 				throw new TskCoreException("Error closing query: ", ex);
 			} finally {
@@ -12167,7 +13953,7 @@ long getDataSourceObjectId(long objectId) {
 			try {
 				return getDataSourceObjectId(connection, objectId);
 			} finally {
-				connection.close();
+				closeConnection(connection);
 			}
 		} catch (TskCoreException ex) {
 			logger.log(Level.SEVERE, "Error getting data source object id for a file", ex);
@@ -12186,10 +13972,12 @@ long getDataSourceObjectId(long objectId) {
 	 */
 	@Deprecated
 	public long getLastObjectId() throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
+			
 			// SELECT MAX(obj_id) AS max_obj_id FROM tsk_objects
 			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.SELECT_MAX_OBJECT_ID);
 			rs = connection.executeQuery(statement);
@@ -12202,7 +13990,7 @@ public long getLastObjectId() throws TskCoreException {
 			throw new TskCoreException("Error getting last object id", e);
 		} finally {
 			closeResultSet(rs);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -12222,11 +14010,12 @@ public long getLastObjectId() throws TskCoreException {
 	 */
 	@Deprecated
 	public List<FsContent> findFilesWhere(String sqlWhereClause) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT * FROM tsk_files WHERE " + sqlWhereClause); //NON-NLS
 			List<FsContent> results = new ArrayList<FsContent>();
@@ -12243,7 +14032,7 @@ public List<FsContent> findFilesWhere(String sqlWhereClause) throws TskCoreExcep
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -12261,11 +14050,12 @@ public List<FsContent> findFilesWhere(String sqlWhereClause) throws TskCoreExcep
 	 */
 	@Deprecated
 	public int getArtifactTypeID(String artifactTypeName) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT artifact_type_id FROM blackboard_artifact_types WHERE type_name = '" + artifactTypeName + "'"); //NON-NLS
 			int typeId = -1;
@@ -12278,7 +14068,7 @@ public int getArtifactTypeID(String artifactTypeName) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -12353,11 +14143,12 @@ public int addAttrType(String attrTypeString, String displayName) throws TskCore
 	 */
 	@Deprecated
 	public int getAttrTypeID(String attrTypeName) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT attribute_type_id FROM blackboard_attribute_types WHERE type_name = '" + attrTypeName + "'"); //NON-NLS
 			int typeId = -1;
@@ -12370,7 +14161,7 @@ public int getAttrTypeID(String attrTypeName) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -12389,11 +14180,12 @@ public int getAttrTypeID(String attrTypeName) throws TskCoreException {
 	 */
 	@Deprecated
 	public String getAttrTypeString(int attrTypeID) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT type_name FROM blackboard_attribute_types WHERE attribute_type_id = " + attrTypeID); //NON-NLS
 			if (rs.next()) {
@@ -12406,7 +14198,7 @@ public String getAttrTypeString(int attrTypeID) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -12425,11 +14217,12 @@ public String getAttrTypeString(int attrTypeID) throws TskCoreException {
 	 */
 	@Deprecated
 	public String getAttrTypeDisplayName(int attrTypeID) throws TskCoreException {
-		CaseDbConnection connection = connections.getConnection();
-		acquireSingleUserCaseReadLock();
+		CaseDbConnection connection = null;
 		Statement s = null;
 		ResultSet rs = null;
+		acquireSingleUserCaseReadLock();
 		try {
+			connection = connections.getConnection();
 			s = connection.createStatement();
 			rs = connection.executeQuery(s, "SELECT display_name FROM blackboard_attribute_types WHERE attribute_type_id = " + attrTypeID); //NON-NLS
 			if (rs.next()) {
@@ -12442,7 +14235,7 @@ public String getAttrTypeDisplayName(int attrTypeID) throws TskCoreException {
 		} finally {
 			closeResultSet(rs);
 			closeStatement(s);
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -12478,19 +14271,17 @@ public ArrayList<BlackboardAttribute.ATTRIBUTE_TYPE> getBlackboardAttributeTypes
 	 */
 	@Deprecated
 	public ResultSet runQuery(String query) throws SQLException {
-		CaseDbConnection connection;
+		CaseDbConnection connection = null;
+		acquireSingleUserCaseReadLock();
 		try {
 			connection = connections.getConnection();
+			return connection.executeQuery(connection.createStatement(), query);
 		} catch (TskCoreException ex) {
 			throw new SQLException("Error getting connection for ad hoc query", ex);
-		}
-		acquireSingleUserCaseReadLock();
-		try {
-			return connection.executeQuery(connection.createStatement(), query);
 		} finally {
 			//TODO unlock should be done in closeRunQuery()
 			//but currently not all code calls closeRunQuery - need to fix this
-			connection.close();
+			closeConnection(connection);
 			releaseSingleUserCaseReadLock();
 		}
 	}
@@ -12616,7 +14407,7 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 				isFile, parentFile, rederiveDetails, toolName, toolVersion,
 				otherDetails, TskData.EncodingType.NONE);
 	}
-	
+
 	/**
 	 * Adds a local/logical file to the case database. The database operations
 	 * are done within a caller-managed transaction; the caller is responsible
@@ -12643,7 +14434,7 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 	 *
 	 * @throws TskCoreException if there is an error completing a case database
 	 *                          operation.
-	 * 
+	 *
 	 * @deprecated Use the newer version with explicit sha256 parameter
 	 */
 	@Deprecated
@@ -12651,8 +14442,8 @@ public LocalFile addLocalFile(String fileName, String localPath,
 			long size, long ctime, long crtime, long atime, long mtime,
 			String md5, FileKnown known, String mimeType,
 			boolean isFile, TskData.EncodingType encodingType,
-			Content parent, CaseDbTransaction transaction) throws TskCoreException {	
-		
+			Content parent, CaseDbTransaction transaction) throws TskCoreException {
+
 		return addLocalFile(fileName, localPath, size, ctime, crtime, atime, mtime,
 				md5, null, known, mimeType, isFile, encodingType,
 				parent, transaction);
@@ -12759,6 +14550,28 @@ public Collection<FileSystem> getFileSystems(Image image) {
 			return new ArrayList<>();
 		}
 	}
+	
+	/**
+	 * Find all files in the data source, by name and parent
+	 *
+	 * @param dataSource the dataSource (Image, parent-less VirtualDirectory) to
+	 *                   search for the given file name
+	 * @param fileName   Pattern of the name of the file or directory to match
+	 *                   (case insensitive, used in LIKE SQL statement).
+	 * @param parentFile Object for parent file/directory to find children in
+	 *
+	 * @return a list of AbstractFile for files/directories whose name matches
+	 *         fileName and that were inside a directory described by
+	 *         parentFile.
+	 *
+	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 * 
+	 * @deprecated Use findFilesInFolder()
+	 */
+	@Deprecated
+	public List<AbstractFile> findFiles(Content dataSource, String fileName, AbstractFile parentFile) throws TskCoreException {
+		return findFilesInFolder(fileName, parentFile);
+	}
 
 	/**
 	 * Acquires a write lock, but only if this is a single-user case. Always
diff --git a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitItemVisitor.java b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitItemVisitor.java
index 7a01c580a8313aa35e05872c4adeceddc9eed53d..5e90113f99b0647e451fb2d5cef4c80cca79a0b2 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitItemVisitor.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitItemVisitor.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2011-2018 Basis Technology Corp.
+ * Copyright 2011-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -177,6 +177,33 @@ public interface SleuthkitItemVisitor<T> {
 	 * @return result of the visit
 	 */
 	T visit(Report report);
+	
+	/**
+	 * Act on (visit) an OsAccount content object
+	 *
+	 * @param account account to visit / act on
+	 *
+	 * @return result of the visit
+	 */
+	T visit(OsAccount account);
+	
+	/**
+	 * Act on (visit) an UnsupportedContent object
+	 *
+	 * @param unsupportedContent content to visit / act on
+	 *
+	 * @return result of the visit
+	 */
+	T visit(UnsupportedContent unsupportedContent);
+	
+	/**
+	 * Act on (visit) a LocalFilesDataSource content object
+	 *
+	 * @param localFilesDataSource report to visit / act on
+	 * @param localFilesDataSource data source to visit / act on
+	 * @return result of the visit
+	 */
+	T visit(LocalFilesDataSource localFilesDataSource);
 
 	/**
 	 * The default visitor - quickest method for implementing a custom visitor.
@@ -270,5 +297,20 @@ public T visit(SlackFile sf) {
 		public T visit(Report report) {
 			return defaultVisit(report);
 		}
+		
+		@Override
+		public T visit(OsAccount account) {
+			return defaultVisit(account);
+		}
+		
+		@Override
+		public T visit(UnsupportedContent unsupportedContent) {
+			return defaultVisit(unsupportedContent);
+		}
+		
+		@Override
+		public T visit(LocalFilesDataSource localFilesDataSource) {
+			return defaultVisit(localFilesDataSource);
+		}
 	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitJNI.java b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitJNI.java
index fbdec24d6c0af560845382c3a8d68d436557b5ea..1e847df0c96e8c957c11b758b5907c211476ceed 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitJNI.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitJNI.java
@@ -438,9 +438,20 @@ void free() throws TskCoreException {
 		 * @throws TskCoreException if there is an error adding the image to
 		 *                          case database.
 		 */
-		long addImageInfo(long deviceObjId, List<String> imageFilePaths, String timeZone, SleuthkitCase skCase) throws TskCoreException {
-			TskCaseDbBridge dbHelper = new TskCaseDbBridge(skCase, new DefaultAddDataSourceCallbacks());
+		long addImageInfo(long deviceObjId, List<String> imageFilePaths, String timeZone, Host host, SleuthkitCase skCase) throws TskCoreException {
+			
 			try {
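+				// No host was provided: derive a default host name from the first image file path,
+				// or fall back to one based on the device object id if no paths were given.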
+				if (host == null) {
+					String hostName;
+					if (imageFilePaths.size() > 0) {
+						String path = imageFilePaths.get(0);
+						hostName = (new java.io.File(path)).getName() + " Host";
+					} else {
+						hostName = "Image_" + deviceObjId + " Host";
+					}
+					host = skCase.getHostManager().newHost(hostName);
+				}
+				TskCaseDbBridge dbHelper = new TskCaseDbBridge(skCase, new DefaultAddDataSourceCallbacks(), host);
 				long tskAutoDbPointer = initializeAddImgNat(dbHelper, timezoneLongToShort(timeZone), false, false, false);
 				runOpenAndAddImgNat(tskAutoDbPointer, UUID.randomUUID().toString(), imageFilePaths.toArray(new String[0]), imageFilePaths.size(), timeZone);				
 				long id = finishAddImgNat(tskAutoDbPointer);
@@ -532,7 +543,7 @@ public void run(String deviceId, String[] imageFilePaths, int sectorSize) throws
 				Image img = addImageToDatabase(skCase, imageFilePaths, sectorSize, "", "", "", "", deviceId);
 				run(deviceId, img, sectorSize, new DefaultAddDataSourceCallbacks());
 			}
-
+			
 			/**
 			 * Starts the process of adding an image to the case database.
 			 *
@@ -551,8 +562,9 @@ public void run(String deviceId, String[] imageFilePaths, int sectorSize) throws
 			 *                          the process)
 			 */
 			public void run(String deviceId, Image image, int sectorSize, 
-					AddDataSourceCallbacks addDataSourceCallbacks) throws TskCoreException, TskDataException {			
-				dbHelper = new TskCaseDbBridge(skCase, addDataSourceCallbacks);
+					AddDataSourceCallbacks addDataSourceCallbacks) throws TskCoreException, TskDataException {	
+				
+				dbHelper = new TskCaseDbBridge(skCase, addDataSourceCallbacks, image.getHost());
 				getTSKReadLock();
 				try {
 					long imageHandle = 0;
@@ -940,6 +952,29 @@ private static void cacheImageHandle(SleuthkitCase skCase, List<String> imagePat
 	public static Image addImageToDatabase(SleuthkitCase skCase, String[] imagePaths, int sectorSize,
 		String timeZone, String md5fromSettings, String sha1fromSettings, String sha256fromSettings, String deviceId) throws TskCoreException {
 		
+		return addImageToDatabase(skCase, imagePaths, sectorSize, timeZone, md5fromSettings, sha1fromSettings, sha256fromSettings, deviceId, null);
+	}	
+	
+	/**
+	 * Add an image to the database and return the open image.
+	 * 
+	 * @param skCase     The current case.
+	 * @param imagePaths The path(s) to the image (for split formats such as .e01 or .001, only the first segment path is needed).
+	 * @param sectorSize The sector size (0 for auto-detect).
+	 * @param timeZone   The time zone.
+	 * @param md5fromSettings        MD5 hash (if known).
+	 * @param sha1fromSettings       SHA1 hash (if known).
+	 * @param sha256fromSettings     SHA256 hash (if known).
+	 * @param deviceId   Device ID.
+	 * @param host       The host associated with the image (may be null).
+	 * 
+	 * @return The Image object.
+	 * 
+	 * @throws TskCoreException 
+	 */
+	public static Image addImageToDatabase(SleuthkitCase skCase, String[] imagePaths, int sectorSize,
+		String timeZone, String md5fromSettings, String sha1fromSettings, String sha256fromSettings, String deviceId, Host host) throws TskCoreException {
+		
 		// Open the image
 		long imageHandle = openImgNat(imagePaths, 1, sectorSize);
 		
@@ -970,7 +1005,7 @@ public static Image addImageToDatabase(SleuthkitCase skCase, String[] imagePaths
 			Image img = skCase.addImage(TskData.TSK_IMG_TYPE_ENUM.valueOf(type), computedSectorSize, 
 				size, null, computedPaths, 
 				timeZone, md5, sha1, sha256, 
-				deviceId, transaction);
+				deviceId, host, transaction);
 			if (!StringUtils.isEmpty(collectionDetails)) {
 				skCase.setAcquisitionDetails(img, collectionDetails);
 			}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/SpecialDirectory.java b/bindings/java/src/org/sleuthkit/datamodel/SpecialDirectory.java
index 5db5888d5b34d8a0ebbd926d18b1fa52559c2512..031b8ace7600e4b68659d0b6d04904b1c05b7a06 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/SpecialDirectory.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/SpecialDirectory.java
@@ -48,7 +48,7 @@ public abstract class SpecialDirectory extends AbstractFile {
 			String mimeType) {
 		super(db, objId, dataSourceObjectId, attrType, attrId, name,
 				fileType, metaAddr, metaSeq, dirType, metaType, dirFlag,
-				metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, null);
+				metaFlags, size, ctime, crtime, atime, mtime, modes, uid, gid, md5Hash, sha256Hash, knownState, parentPath, mimeType, null, OsAccount.NO_OWNER_ID, OsAccount.NO_ACCOUNT, Collections.emptyList());
 	}
 
 	/**
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TaggingManager.java b/bindings/java/src/org/sleuthkit/datamodel/TaggingManager.java
index 105121d36a611c34fa8a43f69fc3bea34597a5fe..df1d09bc61ccd88ac8908efa6972d2f05446cb8a 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/TaggingManager.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TaggingManager.java
@@ -24,7 +24,9 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
+import java.util.Optional;
 import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
+import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
 import static org.sleuthkit.datamodel.TskData.DbType.POSTGRESQL;
 
 /**
@@ -52,10 +54,10 @@ public class TaggingManager {
 	 */
 	public List<TagSet> getTagSets() throws TskCoreException {
 		List<TagSet> tagSetList = new ArrayList<>();
-		CaseDbConnection connection = skCase.getConnection();
+		
 		skCase.acquireSingleUserCaseReadLock();
 		String getAllTagSetsQuery = "SELECT * FROM tsk_tag_sets";
-		try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(getAllTagSetsQuery);) {
+		try (CaseDbConnection connection = skCase.getConnection(); Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(getAllTagSetsQuery);) {
 			while (resultSet.next()) {
 				int setID = resultSet.getInt("tag_set_id");
 				String setName = resultSet.getString("name");
@@ -65,7 +67,6 @@ public List<TagSet> getTagSets() throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error occurred getting TagSet list.", ex);
 		} finally {
-			connection.close();
 			skCase.releaseSingleUserCaseReadLock();
 		}
 		return tagSetList;
@@ -88,10 +89,8 @@ public TagSet addTagSet(String name, List<TagName> tagNames) throws TskCoreExcep
 
 		TagSet tagSet = null;
 
-		CaseDbConnection connection = skCase.getConnection();
-		skCase.acquireSingleUserCaseWriteLock();
-		try (Statement stmt = connection.createStatement()) {
-			connection.beginTransaction();
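+		// Use a CaseDbTransaction, which acquires the case write lock and manages the connection,
+		// replacing the manual lock/connection handling that was here previously.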
+		CaseDbTransaction trans = skCase.beginTransaction();
+		try (Statement stmt = trans.getConnection().createStatement()) {
 			String query = String.format("INSERT INTO tsk_tag_sets (name) VALUES('%s')", name);
 
 			if (skCase.getDatabaseType() == POSTGRESQL) {
@@ -123,13 +122,10 @@ public TagSet addTagSet(String name, List<TagName> tagNames) throws TskCoreExcep
 				}
 				tagSet = new TagSet(setID, name, updatedTags);
 			}
-			connection.commitTransaction();
+			trans.commit();
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			trans.rollback();
 			throw new TskCoreException(String.format("Error adding tag set %s", name), ex);
-		} finally {
-			connection.close();
-			skCase.releaseSingleUserCaseWriteLock();
 		}
 
 		return tagSet;
@@ -154,22 +150,17 @@ public void deleteTagSet(TagSet tagSet) throws TskCoreException {
 			throw new TskCoreException("Unable to delete TagSet (%d). TagSet TagName list contains TagNames that are currently in use.");
 		}
 
-		try (CaseDbConnection connection = skCase.getConnection()) {
-			skCase.acquireSingleUserCaseWriteLock();
-			try (Statement stmt = connection.createStatement()) {
-				connection.beginTransaction();
-				String queryTemplate = "DELETE FROM tag_names WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)";
-				stmt.execute(String.format(queryTemplate, tagSet.getId()));
-
-				queryTemplate = "DELETE FROM tsk_tag_sets WHERE tag_set_id = '%d'";
-				stmt.execute(String.format(queryTemplate, tagSet.getId()));
-				connection.commitTransaction();
-			} catch (SQLException ex) {
-				connection.rollbackTransaction();
-				throw new TskCoreException(String.format("Error deleting tag set where id = %d.", tagSet.getId()), ex);
-			} finally {
-				skCase.releaseSingleUserCaseWriteLock();
-			}
+		CaseDbTransaction trans = skCase.beginTransaction();
+		try (Statement stmt = trans.getConnection().createStatement()) {
+			String queryTemplate = "DELETE FROM tag_names WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)";
+			stmt.execute(String.format(queryTemplate, tagSet.getId()));
+
+			queryTemplate = "DELETE FROM tsk_tag_sets WHERE tag_set_id = '%d'";
+			stmt.execute(String.format(queryTemplate, tagSet.getId()));
+			trans.commit();
+		} catch (SQLException ex) {
+			trans.rollback();
+			throw new TskCoreException(String.format("Error deleting tag set where id = %d.", tagSet.getId()), ex);
 		}
 	}
 
@@ -191,11 +182,10 @@ public TagSet getTagSet(TagName tagName) throws TskCoreException {
 			return null;
 		}
 		
-		CaseDbConnection connection = skCase.getConnection();
 		skCase.acquireSingleUserCaseReadLock();
 		TagSet tagSet = null;
 		String sqlQuery = String.format("SELECT * FROM tsk_tag_sets WHERE tag_set_id = %d", tagName.getTagSetId());
-		try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(sqlQuery);) {
+		try (CaseDbConnection connection = skCase.getConnection(); Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(sqlQuery);) {
 			if (resultSet.next()) {
 				int setID = resultSet.getInt("tag_set_id");
 				String setName = resultSet.getString("name");
@@ -205,7 +195,6 @@ public TagSet getTagSet(TagName tagName) throws TskCoreException {
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error occurred getting TagSet for TagName '%s' (ID=%d)", tagName.getDisplayName(), tagName.getId()), ex);
 		} finally {
-			connection.close();
 			skCase.releaseSingleUserCaseReadLock();
 		}
 	}
@@ -228,12 +217,10 @@ public BlackboardArtifactTagChange addArtifactTag(BlackboardArtifact artifact, T
 			throw new IllegalArgumentException("NULL argument passed to addArtifactTag");
 		}
 
-		CaseDbConnection connection = skCase.getConnection();
-		skCase.acquireSingleUserCaseWriteLock();
 		List<BlackboardArtifactTag> removedTags = new ArrayList<>();
 		List<String> removedTagIds = new ArrayList<>();
+		CaseDbTransaction trans = skCase.beginTransaction();
 		try {
-			connection.beginTransaction();
 			// If a TagName is part of a TagSet remove any existing tags from the
 			// set that are currenctly on the artifact
 			long tagSetId = tagName.getTagSetId();
@@ -242,7 +229,7 @@ public BlackboardArtifactTagChange addArtifactTag(BlackboardArtifact artifact, T
 				// TagName for the given artifact.
 				String selectQuery = String.format("SELECT * from blackboard_artifact_tags JOIN tag_names ON tag_names.tag_name_id = blackboard_artifact_tags.tag_name_id JOIN tsk_examiners on tsk_examiners.examiner_id = blackboard_artifact_tags.examiner_id WHERE artifact_id = %d AND tag_names.tag_set_id = %d", artifact.getArtifactID(), tagSetId);
 
-				try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(selectQuery)) {
+				try (Statement stmt = trans.getConnection().createStatement(); ResultSet resultSet = stmt.executeQuery(selectQuery)) {
 					while (resultSet.next()) {
 						TagName removedTag = new TagName(
 								resultSet.getLong("tag_name_id"),
@@ -270,15 +257,15 @@ public BlackboardArtifactTagChange addArtifactTag(BlackboardArtifact artifact, T
 				if (!removedTags.isEmpty()) {
 					// Remove the tags.
 					String removeQuery = String.format("DELETE FROM blackboard_artifact_tags WHERE tag_id IN (%s)", String.join(",", removedTagIds));
-					try (Statement stmt = connection.createStatement()) {
+					try (Statement stmt = trans.getConnection().createStatement()) {
 						stmt.executeUpdate(removeQuery);
 					}
 				}
 			}
 
 			// Add the new Tag.
-			BlackboardArtifactTag artifactTag = null;
-			try (Statement stmt = connection.createStatement()) {
+			BlackboardArtifactTag artifactTag;
+			try (Statement stmt = trans.getConnection().createStatement()) {
 				Examiner currentExaminer = skCase.getCurrentExaminer();
 				String query = String.format(
 						"INSERT INTO blackboard_artifact_tags (artifact_id, tag_name_id, comment, examiner_id) VALUES (%d, %d, '%s', %d)",
@@ -299,17 +286,73 @@ public BlackboardArtifactTagChange addArtifactTag(BlackboardArtifact artifact, T
 							artifact, skCase.getContentById(artifact.getObjectID()), tagName, comment, currentExaminer.getLoginName());
 				}
 			}
+			
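+			// Update the artifact's aggregate score to account for the newly added tag's known status.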
+			skCase.getScoringManager().updateAggregateScoreAfterAddition(
+					artifact.getId(), artifact.getDataSourceObjectID(), getTagScore(tagName.getKnownStatus()), trans);
 
-			connection.commitTransaction();
+			trans.commit();
 
 			return new BlackboardArtifactTagChange(artifactTag, removedTags);
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			trans.rollback();
 			throw new TskCoreException("Error adding row to blackboard_artifact_tags table (obj_id = " + artifact.getArtifactID() + ", tag_name_id = " + tagName.getId() + ")", ex);
-		} finally {
+		}
+	}
+	
 
-			connection.close();
-			skCase.releaseSingleUserCaseWriteLock();
+	/**
+	 * Returns the score to associate with a tag that has the given known status.
+	 * @param knownStatus The known status of the tag.
+	 * @return The relevant score.
+	 */
+	static Score getTagScore(TskData.FileKnown knownStatus) {
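+		// BAD (notable) tags yield a notable score; any other known status yields a likely-notable score.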
+		switch (knownStatus) {
+			case BAD: 
+				return Score.SCORE_NOTABLE;
+			case UNKNOWN: 
+			case KNOWN:
+			default:
+				return Score.SCORE_LIKELY_NOTABLE;
+		}
+	}
+	
+	/**
+	 * Retrieves the maximum FileKnown status of any tag associated with the
+	 * object id.
+	 *
+	 * @param objectId   The object id of the item.
+	 * @param transaction The case db transaction to perform this query.
+	 *
+	 * @return The maximum FileKnown status for this object or empty.
+	 *
+	 * @throws TskCoreException
+	 */
+	Optional<TskData.FileKnown> getMaxTagKnownStatus(long objectId, CaseDbTransaction transaction) throws TskCoreException {
+		// query content tags and blackboard artifact tags for highest 
+		// known status associated with a tag associated with this object id
+		String queryString = "SELECT tag_names.knownStatus AS knownStatus\n"
+				+ "	FROM (\n"
+				+ "		SELECT ctags.tag_name_id AS tag_name_id FROM content_tags ctags WHERE ctags.obj_id = " + objectId + "\n"
+				+ "	    UNION\n"
+				+ "	    SELECT btags.tag_name_id AS tag_name_id FROM blackboard_artifact_tags btags \n"
+				+ "	    INNER JOIN blackboard_artifacts ba ON btags.artifact_id = ba.artifact_id\n"
+				+ "	    WHERE ba.artifact_obj_id = " + objectId + "\n"
+				+ "	) tag_name_ids\n"
+				+ "	INNER JOIN tag_names ON tag_name_ids.tag_name_id = tag_names.tag_name_id\n"
+				+ "	ORDER BY tag_names.knownStatus DESC\n"
+				+ "	LIMIT 1";
+
+		try (Statement statement = transaction.getConnection().createStatement();
+				ResultSet resultSet = transaction.getConnection().executeQuery(statement, queryString);) {
+
+			if (resultSet.next()) {
+				return Optional.ofNullable(TskData.FileKnown.valueOf(resultSet.getByte("knownStatus")));
+			} else {
+				return Optional.empty();
+			}
+
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting content tag FileKnown status for content with id: " + objectId, ex);
 		}
 	}
 
@@ -327,18 +370,16 @@ public BlackboardArtifactTagChange addArtifactTag(BlackboardArtifact artifact, T
 	 * @throws TskCoreException
 	 */
 	public ContentTagChange addContentTag(Content content, TagName tagName, String comment, long beginByteOffset, long endByteOffset) throws TskCoreException {
-		CaseDbConnection connection = skCase.getConnection();
 		List<ContentTag> removedTags = new ArrayList<>();
 		List<String> removedTagIds = new ArrayList<>();
-		skCase.acquireSingleUserCaseWriteLock();
+		CaseDbTransaction trans = skCase.beginTransaction();
 		try {
-			connection.beginTransaction();
 			long tagSetId = tagName.getTagSetId();
 
 			if (tagSetId > 0) {
 				String selectQuery = String.format("SELECT * from content_tags JOIN tag_names ON tag_names.tag_name_id = content_tags.tag_name_id JOIN tsk_examiners on tsk_examiners.examiner_id = content_tags.examiner_id WHERE obj_id = %d AND tag_names.tag_set_id = %d", content.getId(), tagSetId);
 
-				try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(selectQuery)) {
+				try (Statement stmt = trans.getConnection().createStatement(); ResultSet resultSet = stmt.executeQuery(selectQuery)) {
 					while (resultSet.next()) {
 						TagName removedTag = new TagName(
 								resultSet.getLong("tag_name_id"),
@@ -366,7 +407,7 @@ public ContentTagChange addContentTag(Content content, TagName tagName, String c
 
 				if (!removedTags.isEmpty()) {
 					String removeQuery = String.format("DELETE FROM content_tags WHERE tag_id IN (%s)", String.join(",", removedTagIds));
-					try (Statement stmt = connection.createStatement()) {
+					try (Statement stmt = trans.getConnection().createStatement()) {
 						stmt.executeUpdate(removeQuery);
 					}
 				}
@@ -374,7 +415,7 @@ public ContentTagChange addContentTag(Content content, TagName tagName, String c
 
 			String queryTemplate = "INSERT INTO content_tags (obj_id, tag_name_id, comment, begin_byte_offset, end_byte_offset, examiner_id) VALUES (%d, %d, '%s', %d, %d, %d)";
 			ContentTag contentTag = null;
-			try (Statement stmt = connection.createStatement()) {
+			try (Statement stmt = trans.getConnection().createStatement()) {
 				Examiner currentExaminer = skCase.getCurrentExaminer();
 				String query = String.format(queryTemplate,
 						content.getId(),
@@ -396,15 +437,16 @@ public ContentTagChange addContentTag(Content content, TagName tagName, String c
 							content, tagName, comment, beginByteOffset, endByteOffset, currentExaminer.getLoginName());
 				}
 			}
+			
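+			// Update the content's aggregate score for the new tag; the data source id may be null for content without a data source.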
+			Long dataSourceId = content.getDataSource() != null ? content.getDataSource().getId() : null;
+			skCase.getScoringManager().updateAggregateScoreAfterAddition(
+					content.getId(), dataSourceId, getTagScore(tagName.getKnownStatus()), trans);
 
-			connection.commitTransaction();
+			trans.commit();
 			return new ContentTagChange(contentTag, removedTags);
 		} catch (SQLException ex) {
-			connection.rollbackTransaction();
+			trans.rollback();
 			throw new TskCoreException("Error adding row to content_tags table (obj_id = " + content.getId() + ", tag_name_id = " + tagName.getId() + ")", ex);
-		} finally {
-			connection.close();
-			skCase.releaseSingleUserCaseWriteLock();
 		}
 	}
 
@@ -419,32 +461,30 @@ public ContentTagChange addContentTag(Content content, TagName tagName, String c
 	 * @throws TskCoreException
 	 */
 	private boolean isTagSetInUse(TagSet tagSet) throws TskCoreException {
+		skCase.acquireSingleUserCaseReadLock();
 		try (CaseDbConnection connection = skCase.getConnection()) {
 			List<TagName> tagNameList = tagSet.getTagNames();
 			if (tagNameList != null && !tagNameList.isEmpty()) {
-				skCase.acquireSingleUserCaseReadLock();
-				try {
-					String statement = String.format("SELECT tag_id FROM content_tags WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)", tagSet.getId());
-					try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(statement)) {
-						if (resultSet.next()) {
-							return true;
-						}
-					} catch (SQLException ex) {
-						throw new TskCoreException(String.format("Failed to determine if TagSet is in use (%s)", tagSet.getId()), ex);
+				String statement = String.format("SELECT tag_id FROM content_tags WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)", tagSet.getId());
+				try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(statement)) {
+					if (resultSet.next()) {
+						return true;
 					}
+				} catch (SQLException ex) {
+					throw new TskCoreException(String.format("Failed to determine if TagSet is in use (%s)", tagSet.getId()), ex);
+				}
 
-					statement = String.format("SELECT tag_id FROM blackboard_artifact_tags WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)", tagSet.getId());
-					try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(statement)) {
-						if (resultSet.next()) {
-							return true;
-						}
-					} catch (SQLException ex) {
-						throw new TskCoreException(String.format("Failed to determine if TagSet is in use (%s)", tagSet.getId()), ex);
+				statement = String.format("SELECT tag_id FROM blackboard_artifact_tags WHERE tag_name_id IN (SELECT tag_name_id FROM tag_names WHERE tag_set_id = %d)", tagSet.getId());
+				try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(statement)) {
+					if (resultSet.next()) {
+						return true;
 					}
-				} finally {
-					skCase.releaseSingleUserCaseReadLock();
+				} catch (SQLException ex) {
+					throw new TskCoreException(String.format("Failed to determine if TagSet is in use (%s)", tagSet.getId()), ex);
 				}
 			}
+		} finally {
+			skCase.releaseSingleUserCaseReadLock();
 		}
 
 		return false;
@@ -467,11 +507,9 @@ private List<TagName> getTagNamesByTagSetID(int tagSetId) throws TskCoreExceptio
 
 		List<TagName> tagNameList = new ArrayList<>();
 
-		CaseDbConnection connection = skCase.getConnection();
-
 		skCase.acquireSingleUserCaseReadLock();
 		String query = String.format("SELECT * FROM tag_names WHERE tag_set_id = %d", tagSetId);
-		try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(query)) {
+		try (CaseDbConnection connection = skCase.getConnection(); Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(query)) {
 			while (resultSet.next()) {
 				tagNameList.add(new TagName(resultSet.getLong("tag_name_id"),
 						resultSet.getString("display_name"),
@@ -484,7 +522,6 @@ private List<TagName> getTagNamesByTagSetID(int tagSetId) throws TskCoreExceptio
 		} catch (SQLException ex) {
 			throw new TskCoreException(String.format("Error getting tag names for tag set (%d)", tagSetId), ex);
 		} finally {
-			connection.close();
 			skCase.releaseSingleUserCaseReadLock();
 		}
 
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimeUtilities.java b/bindings/java/src/org/sleuthkit/datamodel/TimeUtilities.java
index fa25bf6bdc96174659daa0da1225e94ce0df04b4..4e4648cfab0ef51e517a19978274f159e0c10cd2 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimeUtilities.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimeUtilities.java
@@ -20,6 +20,7 @@
 package org.sleuthkit.datamodel;
 
 import java.text.SimpleDateFormat;
+import java.util.Date;
 import java.util.TimeZone;
 import java.util.logging.Level;
 import java.util.logging.Logger;
@@ -32,8 +33,11 @@ public class TimeUtilities {
 	private static final Logger LOGGER = Logger.getLogger(TimeUtilities.class.getName());
 	private static final SimpleDateFormat DATE_FORMATTER = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
 	
+	private TimeUtilities() {
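+		// Intentionally empty: this is a static utility class and should not be instantiated.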
+	}
+	
 	/**
-	 * Return the epoch into string in ISO 8601 dateTime format
+	 * Return the epoch into string in yyyy-MM-dd HH:mm:ss format
 	 *
 	 * @param epoch time in seconds
 	 *
@@ -48,7 +52,7 @@ public static String epochToTime(long epoch) {
 	}
 
 	/**
-	 * Return the epoch into string in ISO 8601 dateTime format, 
+	 * Return the epoch into string in yyyy-MM-dd HH:mm:ss format, 
 	 * in the given timezone
 	 *
 	 * @param epoch time in seconds
@@ -67,6 +71,20 @@ public static String epochToTime(long epoch, TimeZone tzone) {
 		return time;
 	}
 	
+	/**
+	 * Return the epoch into string in ISO8601 format, in the given timezone.
+	 *
+	 * @param epoch time in seconds
+	 * @param tzone time zone
+	 *
+	 * @return formatted date time string in ISO 8601 format
+	 */
+	public static String epochToTimeISO8601(long epoch, TimeZone tzone) {
+		SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'");
+		formatter.setTimeZone(tzone);
+		return formatter.format(new Date(epoch));
+	}
+	
 	/**
 	 * Convert from ISO 8601 formatted date time string to epoch time in seconds
 	 *
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeSingleDescription.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeSingleDescription.java
index 84f71ba7db494afe871ab6b6cd69734035b7d696..6134d6d1906145f9ff10aedfb40f39a6cd164e51 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeSingleDescription.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeSingleDescription.java
@@ -39,12 +39,11 @@ public TimelineEventDescriptionWithTime makeEventDescription(BlackboardArtifact
 		BlackboardAttribute timeAttribute = artifact.getAttribute(getDateTimeAttributeType());
 
 		if (timeAttribute == null) {
-			logger.log(Level.WARNING, "Artifact {0} has no date/time attribute, skipping it.", artifact.toString()); // NON-NLS
 			return null;
 		}
 
 		long time = timeAttribute.getValueLong();
-		return new TimelineEventDescriptionWithTime(time, null, null, description);
+		return new TimelineEventDescriptionWithTime(time, timeAttribute.getDisplayString(), null, description);
 	}
 
 	TimelineEventArtifactTypeSingleDescription(int typeID, String displayName,
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java
index 43f6749b46e3545daa95b9f091391402b62e4080..3b36df70a98bc1761ff2282b74c218a03b4dae5f 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2018-2019 Basis Technology Corp.
+ * Copyright 2018-2021 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -125,7 +125,7 @@ default SortedSet<? extends TimelineEventType> getSiblings() {
 
 	@Override
 	default int compareTo(TimelineEventType otherType) {
-		return Comparator.comparing(TimelineEventType::getTypeID).compare(this, otherType);
+		return Comparator.comparing(TimelineEventType::getDisplayName).compare(this, otherType);
 	}
 
 	/**
@@ -181,9 +181,18 @@ private HierarchyLevel(String displayName) {
 	TimelineEventType ROOT_EVENT_TYPE = new TimelineEventTypeImpl(0,
 			getBundle().getString("RootEventType.eventTypes.name"), // NON-NLS
 			HierarchyLevel.ROOT, null) {
+				
 		@Override
 		public SortedSet< TimelineEventType> getChildren() {
-			return ImmutableSortedSet.of(FILE_SYSTEM, WEB_ACTIVITY, MISC_TYPES, CUSTOM_TYPES);
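+			// Order the top-level categories by their numeric type id rather than by the display-name ordering used by compareTo().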
+			ImmutableSortedSet.Builder<TimelineEventType> builder = ImmutableSortedSet.orderedBy(new Comparator<TimelineEventType>() {
+				@Override
+				public int compare(TimelineEventType o1, TimelineEventType o2) {
+					return ((Long) o1.getTypeID()).compareTo(o2.getTypeID());
+				}
+			});
+
+			builder.add(FILE_SYSTEM, WEB_ACTIVITY, MISC_TYPES, CUSTOM_TYPES);
+			return builder.build();
 		}
 	};
 
@@ -202,8 +211,12 @@ public SortedSet< TimelineEventType> getChildren() {
 			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
 		@Override
 		public SortedSet< TimelineEventType> getChildren() {
-			return ImmutableSortedSet.of(WEB_DOWNLOADS, WEB_COOKIE, WEB_BOOKMARK,
-					WEB_HISTORY, WEB_SEARCH, WEB_FORM_AUTOFILL, WEB_FORM_ADDRESSES);
+			return ImmutableSortedSet.of(WEB_DOWNLOADS, WEB_COOKIE,
+					WEB_COOKIE_ACCESSED,
+					WEB_COOKIE_END, WEB_BOOKMARK,
+					WEB_HISTORY, WEB_SEARCH, WEB_FORM_AUTOFILL,
+					WEB_FORM_ADDRESSES, WEB_FORM_ADDRESSES_MODIFIED,
+					WEB_FORM_AUTOFILL_ACCESSED, WEB_CACHE, WEB_HISTORY_CREATED);
 		}
 	};
 
@@ -214,35 +227,32 @@ public SortedSet< TimelineEventType> getChildren() {
 			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
 		@Override
 		public SortedSet<TimelineEventType> getChildren() {
-			ImmutableSortedSet.Builder<TimelineEventType> builder = ImmutableSortedSet.orderedBy(new Comparator<TimelineEventType>() {
-				@Override
-				public int compare(TimelineEventType o1, TimelineEventType o2) {
-					return o1.getDisplayName().compareTo(o2.getDisplayName());
-				}
-			});
-
-			builder.add(CALL_LOG, DEVICES_ATTACHED, EMAIL,
+			return ImmutableSortedSet.of(CALL_LOG, CALL_LOG_END, DEVICES_ATTACHED, EMAIL, EMAIL_RCVD,
 					EXIF, GPS_BOOKMARK, GPS_LAST_KNOWN_LOCATION, GPS_TRACKPOINT,
 					GPS_ROUTE, GPS_SEARCH, GPS_TRACK, INSTALLED_PROGRAM, LOG_ENTRY, MESSAGE,
 					METADATA_LAST_PRINTED, METADATA_LAST_SAVED, METADATA_CREATED, PROGRAM_EXECUTION,
-					RECENT_DOCUMENTS, REGISTRY);
+					RECENT_DOCUMENTS, REGISTRY, BACKUP_EVENT_START, BACKUP_EVENT_END,
+					BLUETOOTH_PAIRING, CALENDAR_ENTRY_START, CALENDAR_ENTRY_END,
+					PROGRAM_DELETED,
+					OS_INFO, WIFI_NETWORK, USER_DEVICE_EVENT_START, USER_DEVICE_EVENT_END,
+					SERVICE_ACCOUNT, SCREEN_SHOT, PROGRAM_NOTIFICATION,
+					BLUETOOTH_PAIRING_ACCESSED, BLUETOOTH_ADAPTER);
 
-			return builder.build();
 		}
 	};
 
 	TimelineEventType FILE_MODIFIED = new FilePathEventType(4,
 			getBundle().getString("FileSystemTypes.fileModified.name"), // NON-NLS
 			HierarchyLevel.EVENT, FILE_SYSTEM);
-	
+
 	TimelineEventType FILE_ACCESSED = new FilePathEventType(5,
 			getBundle().getString("FileSystemTypes.fileAccessed.name"), // NON-NLS
 			HierarchyLevel.EVENT, FILE_SYSTEM);
-	
+
 	TimelineEventType FILE_CREATED = new FilePathEventType(6,
 			getBundle().getString("FileSystemTypes.fileCreated.name"), // NON-NLS
 			HierarchyLevel.EVENT, FILE_SYSTEM);
-	
+
 	TimelineEventType FILE_CHANGED = new FilePathEventType(7,
 			getBundle().getString("FileSystemTypes.fileChanged.name"), // NON-NLS
 			HierarchyLevel.EVENT, FILE_SYSTEM);
@@ -253,28 +263,28 @@ public int compare(TimelineEventType o1, TimelineEventType o2) {
 			new BlackboardArtifact.Type(TSK_WEB_DOWNLOAD),
 			new Type(TSK_DATETIME_ACCESSED),
 			new Type(TSK_URL));
-	
+
 	TimelineEventType WEB_COOKIE = new URLArtifactEventType(9,
 			getBundle().getString("WebTypes.webCookies.name"),// NON-NLS
 			WEB_ACTIVITY,
 			new BlackboardArtifact.Type(TSK_WEB_COOKIE),
-			new Type(TSK_DATETIME),
+			new Type(TSK_DATETIME_CREATED),
 			new Type(TSK_URL));
-	
+
 	TimelineEventType WEB_BOOKMARK = new URLArtifactEventType(10,
 			getBundle().getString("WebTypes.webBookmarks.name"), // NON-NLS
 			WEB_ACTIVITY,
 			new BlackboardArtifact.Type(TSK_WEB_BOOKMARK),
 			new Type(TSK_DATETIME_CREATED),
 			new Type(TSK_URL));
-	
+
 	TimelineEventType WEB_HISTORY = new URLArtifactEventType(11,
 			getBundle().getString("WebTypes.webHistory.name"), // NON-NLS
 			WEB_ACTIVITY,
 			new BlackboardArtifact.Type(TSK_WEB_HISTORY),
 			new Type(TSK_DATETIME_ACCESSED),
 			new Type(TSK_URL));
-	
+
 	TimelineEventType WEB_SEARCH = new URLArtifactEventType(12,
 			getBundle().getString("WebTypes.webSearch.name"), // NON-NLS
 			WEB_ACTIVITY,
@@ -326,7 +336,7 @@ public int compare(TimelineEventType o1, TimelineEventType o2) {
 				final BlackboardAttribute longStart = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE_START));
 				final BlackboardAttribute latEnd = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE_END));
 				final BlackboardAttribute longEnd = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE_END));
-				return String.format("from %1$s %2$s to %3$s %4$s", stringValueOf(latStart), stringValueOf(longStart), stringValueOf(latEnd), stringValueOf(longEnd)); // NON-NLS
+				return String.format("From latitude: %1$s longitude: %2$s To latitude: %3$s longitude: %4$s", stringValueOf(latStart), stringValueOf(longStart), stringValueOf(latEnd), stringValueOf(longEnd)); // NON-NLS
 			});
 
 	@SuppressWarnings("deprecation")
@@ -339,7 +349,7 @@ public int compare(TimelineEventType o1, TimelineEventType o2) {
 			artf -> {
 				final BlackboardAttribute longitude = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE));
 				final BlackboardAttribute latitude = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE));
-				return stringValueOf(latitude) + " " + stringValueOf(longitude); // NON-NLS
+				return "Latitude: " + stringValueOf(latitude) + " Longitude: " + stringValueOf(longitude); // NON-NLS
 			},
 			new EmptyExtractor());
 
@@ -358,7 +368,7 @@ public int compare(TimelineEventType o1, TimelineEventType o2) {
 					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_FROM));
 				}
 
-				return stringValueOf(phoneNumber);
+				return "Phone Number: " + stringValueOf(phoneNumber);
 			},
 			new AttributeExtractor(new Type(TSK_DIRECTION)));
 
@@ -376,7 +386,7 @@ public int compare(TimelineEventType o1, TimelineEventType o2) {
 				if (emailTo.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
 					emailTo = emailTo.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
 				}
-				return emailFrom + " to " + emailTo; // NON-NLS
+				return "Sent from: " + emailFrom + " Sent to: " + emailTo; // NON-NLS
 			},
 			new AttributeExtractor(new Type(TSK_SUBJECT)),
 			artf -> {
@@ -473,7 +483,7 @@ public SortedSet< TimelineEventType> getChildren() {
 				final BlackboardAttribute name = getAttributeSafe(artf, new Type(TSK_NAME));
 				final BlackboardAttribute value = getAttributeSafe(artf, new Type(TSK_VALUE));
 				final BlackboardAttribute count = getAttributeSafe(artf, new Type(TSK_COUNT));
-				return stringValueOf(name) + ":" + stringValueOf(value) + " count: " + stringValueOf(count); // NON-NLS
+				return stringValueOf(name) + ":" + stringValueOf(value); // NON-NLS
 			}, new EmptyExtractor(), new EmptyExtractor());
 
 	TimelineEventType WEB_FORM_ADDRESSES = new URLArtifactEventType(28,
@@ -482,7 +492,7 @@ public SortedSet< TimelineEventType> getChildren() {
 			new BlackboardArtifact.Type(TSK_WEB_FORM_ADDRESS),
 			new Type(TSK_DATETIME_ACCESSED),
 			new Type(TSK_EMAIL));
-	
+
 	TimelineEventType GPS_BOOKMARK = new TimelineEventArtifactTypeImpl(29,
 			getBundle().getString("MiscTypes.GPSBookmark.name"), // NON-NLS
 			MISC_TYPES,
@@ -492,7 +502,7 @@ public SortedSet< TimelineEventType> getChildren() {
 			artf -> {
 				final BlackboardAttribute longitude = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE));
 				final BlackboardAttribute latitude = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE));
-				return stringValueOf(latitude) + " " + stringValueOf(longitude); // NON-NLS
+				return "Latitude: " + stringValueOf(latitude) + " Longitude: " + stringValueOf(longitude); // NON-NLS
 			},
 			new EmptyExtractor());
 
@@ -505,7 +515,7 @@ public SortedSet< TimelineEventType> getChildren() {
 			artf -> {
 				final BlackboardAttribute longitude = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE));
 				final BlackboardAttribute latitude = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE));
-				return stringValueOf(latitude) + " " + stringValueOf(longitude); // NON-NLS
+				return "Latitude: " + stringValueOf(latitude) + " Longitude: " + stringValueOf(longitude); // NON-NLS
 			},
 			new EmptyExtractor());
 
@@ -518,7 +528,7 @@ public SortedSet< TimelineEventType> getChildren() {
 			artf -> {
 				final BlackboardAttribute longitude = getAttributeSafe(artf, new Type(TSK_GEO_LONGITUDE));
 				final BlackboardAttribute latitude = getAttributeSafe(artf, new Type(TSK_GEO_LATITUDE));
-				return stringValueOf(latitude) + " " + stringValueOf(longitude); // NON-NLS
+				return "Latitude: " + stringValueOf(latitude) + " Longitude: " + stringValueOf(longitude); // NON-NLS
 			},
 			new EmptyExtractor());
 
@@ -527,34 +537,39 @@ public SortedSet< TimelineEventType> getChildren() {
 			MISC_TYPES,
 			new BlackboardArtifact.Type(TSK_GPS_TRACK),
 			new Type(TSK_NAME));
-	
+
 	TimelineEventType METADATA_LAST_PRINTED = new TimelineEventArtifactTypeImpl(33,
 			getBundle().getString("MiscTypes.metadataLastPrinted.name"),// NON-NLS
 			MISC_TYPES,
 			new BlackboardArtifact.Type(TSK_METADATA),
 			new BlackboardAttribute.Type(TSK_LAST_PRINTED_DATETIME),
-            artf -> {return getBundle().getString("MiscTypes.metadataLastPrinted.name");},
-	        new EmptyExtractor(),
-	        new EmptyExtractor());
-
+			artf -> {
+				return getBundle().getString("MiscTypes.metadataLastPrinted.name");
+			},
+			new EmptyExtractor(),
+			new EmptyExtractor());
 
 	TimelineEventType METADATA_LAST_SAVED = new TimelineEventArtifactTypeImpl(34,
 			getBundle().getString("MiscTypes.metadataLastSaved.name"),// NON-NLS
 			MISC_TYPES,
 			new BlackboardArtifact.Type(TSK_METADATA),
 			new BlackboardAttribute.Type(TSK_DATETIME_MODIFIED),
-            artf -> {return getBundle().getString("MiscTypes.metadataLastSaved.name");},
-	        new EmptyExtractor(),
-	       new EmptyExtractor());
+			artf -> {
+				return getBundle().getString("MiscTypes.metadataLastSaved.name");
+			},
+			new EmptyExtractor(),
+			new EmptyExtractor());
 
 	TimelineEventType METADATA_CREATED = new TimelineEventArtifactTypeImpl(35,
 			getBundle().getString("MiscTypes.metadataCreated.name"),// NON-NLS
 			MISC_TYPES,
 			new BlackboardArtifact.Type(TSK_METADATA),
 			new BlackboardAttribute.Type(TSK_DATETIME_CREATED),
-            artf -> {return getBundle().getString("MiscTypes.metadataCreated.name");},
-	        new EmptyExtractor(),
-	        new EmptyExtractor());
+			artf -> {
+				return getBundle().getString("MiscTypes.metadataCreated.name");
+			},
+			new EmptyExtractor(),
+			new EmptyExtractor());
 
 	TimelineEventType PROGRAM_EXECUTION = new TimelineEventArtifactTypeImpl(36,
 			getBundle().getString("MiscTypes.programexecuted.name"),// NON-NLS
@@ -562,14 +577,240 @@ public SortedSet< TimelineEventType> getChildren() {
 			new BlackboardArtifact.Type(TSK_PROG_RUN),
 			new Type(TSK_DATETIME),
 			new AttributeExtractor(new Type(TSK_PROG_NAME)),
-            artf -> {
-	                 String userName = stringValueOf(getAttributeSafe(artf, new Type(TSK_USER_NAME)));
-				     if (userName != null) {
-					    return userName;
-				     }
-	                 return "";},
+			artf -> {
+				String userName = stringValueOf(getAttributeSafe(artf, new Type(TSK_USER_NAME)));
+				if (userName != null) {
+					return userName;
+				}
+				return "";
+			},
 			new AttributeExtractor(new Type(TSK_COMMENT)));
-			
+
+	TimelineEventType WEB_FORM_AUTOFILL_ACCESSED = new TimelineEventArtifactTypeImpl(37,
+			getBundle().getString("WebTypes.webFormAutofillAccessed.name"),
+			WEB_ACTIVITY,
+			new BlackboardArtifact.Type(TSK_WEB_FORM_AUTOFILL),
+			new Type(TSK_DATETIME_ACCESSED),
+			artf -> {
+				final BlackboardAttribute name = getAttributeSafe(artf, new Type(TSK_NAME));
+				final BlackboardAttribute value = getAttributeSafe(artf, new Type(TSK_VALUE));
+				final BlackboardAttribute count = getAttributeSafe(artf, new Type(TSK_COUNT));
+				return stringValueOf(name) + ":" + stringValueOf(value) + " Access count: " + stringValueOf(count); // NON-NLS
+			}, new EmptyExtractor(), new EmptyExtractor());
+
+	TimelineEventType CALL_LOG_END = new TimelineEventArtifactTypeImpl(38,
+			getBundle().getString("MiscTypes.CallsEnd.name"), // NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_CALLLOG),
+			new Type(TSK_DATETIME_END),
+			new AttributeExtractor(new Type(TSK_NAME)),
+			artf -> {
+				BlackboardAttribute phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER));
+				if (phoneNumber == null) {
+					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_TO));
+				}
+				if (phoneNumber == null) {
+					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_FROM));
+				}
+
+				return "Phone number: " + stringValueOf(phoneNumber);
+			},
+			new AttributeExtractor(new Type(TSK_DIRECTION)));
+
+	TimelineEventType EMAIL_RCVD = new TimelineEventArtifactTypeImpl(39,
+			getBundle().getString("MiscTypes.EmailRcvd.name"), // NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_EMAIL_MSG),
+			new Type(TSK_DATETIME_RCVD),
+			artf -> {
+				String emailFrom = stringValueOf(getAttributeSafe(artf, new Type(TSK_EMAIL_FROM)));
+				if (emailFrom.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
+					emailFrom = emailFrom.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
+				}
+				String emailTo = stringValueOf(getAttributeSafe(artf, new Type(TSK_EMAIL_TO)));
+				if (emailTo.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
+					emailTo = emailTo.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
+				}
+				return "Message from: " + emailFrom + " To: " + emailTo; // NON-NLS
+			},
+			new AttributeExtractor(new Type(TSK_SUBJECT)),
+			artf -> {
+				final BlackboardAttribute msgAttribute = getAttributeSafe(artf, new Type(TSK_EMAIL_CONTENT_PLAIN));
+				String msg = stringValueOf(msgAttribute);
+				if (msg.length() > TimelineEventArtifactTypeImpl.EMAIL_FULL_DESCRIPTION_LENGTH_MAX) {
+					msg = msg.substring(0, TimelineEventArtifactTypeImpl.EMAIL_FULL_DESCRIPTION_LENGTH_MAX);
+				}
+				return msg;
+			});
+
+	TimelineEventType WEB_FORM_ADDRESSES_MODIFIED = new URLArtifactEventType(40,
+			getBundle().getString("WebTypes.webFormAddressModified.name"),//NON-NLS
+			WEB_ACTIVITY,
+			new BlackboardArtifact.Type(TSK_WEB_FORM_ADDRESS),
+			new Type(TSK_DATETIME_MODIFIED),
+			new Type(TSK_EMAIL));
+
+	TimelineEventType WEB_COOKIE_ACCESSED = new URLArtifactEventType(41,
+			getBundle().getString("WebTypes.webCookiesAccessed.name"),// NON-NLS
+			WEB_ACTIVITY,
+			new BlackboardArtifact.Type(TSK_WEB_COOKIE),
+			new Type(TSK_DATETIME_ACCESSED),
+			new Type(TSK_URL));
+
+	TimelineEventType WEB_COOKIE_END = new URLArtifactEventType(42,
+			getBundle().getString("WebTypes.webCookiesEnd.name"),// NON-NLS
+			WEB_ACTIVITY,
+			new BlackboardArtifact.Type(TSK_WEB_COOKIE),
+			new Type(TSK_DATETIME_END),
+			new Type(TSK_URL));
+	
+	TimelineEventType BACKUP_EVENT_START = new TimelineEventArtifactTypeImpl(43,
+			getBundle().getString("TimelineEventType.BackupEventStart.txt"),// NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_BACKUP_EVENT),
+			new BlackboardAttribute.Type(TSK_DATETIME_START),
+			artf -> {
+				return getBundle().getString("TimelineEventType.BackupEvent.description.start");
+			},
+			new EmptyExtractor(),
+			new EmptyExtractor());
+	
+	TimelineEventType BACKUP_EVENT_END = new TimelineEventArtifactTypeImpl(44,
+			getBundle().getString("TimelineEventType.BackupEventEnd.txt"),// NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_BACKUP_EVENT),
+			new BlackboardAttribute.Type(TSK_DATETIME_END),
+			artf -> {
+				return getBundle().getString("TimelineEventType.BackupEvent.description.end");
+			},
+			new EmptyExtractor(),
+			new EmptyExtractor());
+	
+	TimelineEventType BLUETOOTH_PAIRING = new TimelineEventArtifactTypeSingleDescription(45,
+			getBundle().getString("TimelineEventType.BluetoothPairing.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_BLUETOOTH_PAIRING),
+			new BlackboardAttribute.Type(TSK_DATETIME),
+			new BlackboardAttribute.Type(TSK_DEVICE_NAME));
+	
+	TimelineEventType CALENDAR_ENTRY_START = new TimelineEventArtifactTypeSingleDescription(46,
+			getBundle().getString("TimelineEventType.CalendarEntryStart.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_CALENDAR_ENTRY),
+			new BlackboardAttribute.Type(TSK_DATETIME_START),
+			new BlackboardAttribute.Type(TSK_DESCRIPTION));
+	
+	TimelineEventType CALENDAR_ENTRY_END = new TimelineEventArtifactTypeSingleDescription(47,
+			getBundle().getString("TimelineEventType.CalendarEntryEnd.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_CALENDAR_ENTRY),
+			new BlackboardAttribute.Type(TSK_DATETIME_END),
+			new BlackboardAttribute.Type(TSK_DESCRIPTION));
+	
+	TimelineEventType PROGRAM_DELETED = new TimelineEventArtifactTypeSingleDescription(48,
+			getBundle().getString("TimelineEventType.DeletedProgram.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_DELETED_PROG),
+			new BlackboardAttribute.Type(TSK_DATETIME),
+			new BlackboardAttribute.Type(TSK_PROG_NAME));	
+	
+	TimelineEventType OS_INFO = new TimelineEventArtifactTypeSingleDescription(49,
+			getBundle().getString("TimelineEventType.OSInfo.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_OS_INFO),
+			new BlackboardAttribute.Type(TSK_DATETIME),
+			new BlackboardAttribute.Type(TSK_PROG_NAME));
+
+	TimelineEventType PROGRAM_NOTIFICATION = new TimelineEventArtifactTypeSingleDescription(50,
+			getBundle().getString("TimelineEventType.ProgramNotification.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_PROG_NOTIFICATIONS),
+			new BlackboardAttribute.Type(TSK_DATETIME),
+			new BlackboardAttribute.Type(TSK_PROG_NAME));
+	
+	TimelineEventType SCREEN_SHOT = new TimelineEventArtifactTypeSingleDescription(51,
+			getBundle().getString("TimelineEventType.ScreenShot.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_SCREEN_SHOTS),
+			new BlackboardAttribute.Type(TSK_DATETIME),
+			new BlackboardAttribute.Type(TSK_PROG_NAME));
+		
+	TimelineEventType SERVICE_ACCOUNT = new TimelineEventArtifactTypeImpl(52,
+			getBundle().getString("TimelineEventType.ServiceAccount.txt"),// NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_SERVICE_ACCOUNT),
+			new BlackboardAttribute.Type(TSK_DATETIME_CREATED),
+			artf -> {
+				String progName = stringValueOf(getAttributeSafe(artf, new Type(TSK_PROG_NAME)));
+				String userId = stringValueOf(getAttributeSafe(artf, new Type(TSK_USER_ID)));
+				return String.format("Program Name: %s User ID: %s", progName, userId);
+			},
+			new EmptyExtractor(),
+			new EmptyExtractor());
+	
+	TimelineEventType USER_DEVICE_EVENT_START = new TimelineEventArtifactTypeImpl(53,
+			getBundle().getString("TimelineEventType.UserDeviceEventStart.txt"),// NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_USER_DEVICE_EVENT),
+			new BlackboardAttribute.Type(TSK_DATETIME_START),
+			artf -> {
+				String progName = stringValueOf(getAttributeSafe(artf, new Type(TSK_PROG_NAME)));
+				String activityType = stringValueOf(getAttributeSafe(artf, new Type(TSK_ACTIVITY_TYPE)));
+				String connectionType = stringValueOf(getAttributeSafe(artf, new Type(TSK_VALUE)));
+				return String.format("Program Name: %s Activity Type: %s Connection Type: %s", progName, activityType, connectionType);
+			},
+			new EmptyExtractor(),
+			new EmptyExtractor());
+	
+	TimelineEventType USER_DEVICE_EVENT_END = new TimelineEventArtifactTypeImpl(54,
+			getBundle().getString("TimelineEventType.UserDeviceEventEnd.txt"),// NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_USER_DEVICE_EVENT),
+			new BlackboardAttribute.Type(TSK_DATETIME_END),
+			artf -> {
+				String progName = stringValueOf(getAttributeSafe(artf, new Type(TSK_PROG_NAME)));
+				String activityType = stringValueOf(getAttributeSafe(artf, new Type(TSK_ACTIVITY_TYPE)));
+				String connectionType = stringValueOf(getAttributeSafe(artf, new Type(TSK_VALUE)));
+				return String.format("Program Name: %s Activity Type: %s Connection Type: %s", progName, activityType, connectionType);
+			},
+			new EmptyExtractor(),
+			new EmptyExtractor());
+	
+	TimelineEventType WEB_CACHE = new URLArtifactEventType(55,
+			getBundle().getString("TimelineEventType.WebCache.text"),// NON-NLS
+			WEB_ACTIVITY,
+			new BlackboardArtifact.Type(TSK_WEB_CACHE),
+			new Type(TSK_DATETIME_CREATED),
+			new Type(TSK_URL));
+	
+	TimelineEventType WIFI_NETWORK = new TimelineEventArtifactTypeSingleDescription(56,
+			getBundle().getString("TimelineEventType.WIFINetwork.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_WIFI_NETWORK),
+			new BlackboardAttribute.Type(TSK_DATETIME),
+			new BlackboardAttribute.Type(TSK_SSID));
+	
+	TimelineEventType WEB_HISTORY_CREATED = new URLArtifactEventType(57,
+			getBundle().getString("WebTypes.webHistoryCreated.name"),// NON-NLS
+			WEB_ACTIVITY,
+			new BlackboardArtifact.Type(TSK_WEB_HISTORY),
+			new Type(TSK_DATETIME_CREATED),
+			new Type(TSK_URL));
+	
+	TimelineEventType BLUETOOTH_ADAPTER = new TimelineEventArtifactTypeSingleDescription(58,
+			getBundle().getString("TimelineEventType.BluetoothAdapter.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_BLUETOOTH_ADAPTER),
+			new BlackboardAttribute.Type(TSK_DATETIME),
+			new BlackboardAttribute.Type(TSK_NAME));
+	
+	TimelineEventType BLUETOOTH_PAIRING_ACCESSED = new TimelineEventArtifactTypeSingleDescription(59,
+			getBundle().getString("TimelineEventType.BluetoothPairingLastConnection.txt"),//NON-NLS
+			MISC_TYPES,
+			new BlackboardArtifact.Type(TSK_BLUETOOTH_PAIRING),
+			new BlackboardAttribute.Type(TSK_DATETIME_ACCESSED),
+			new BlackboardAttribute.Type(TSK_DEVICE_NAME));
+
 	static SortedSet<? extends TimelineEventType> getCategoryTypes() {
 		return ROOT_EVENT_TYPE.getChildren();
 	}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java
index a3771f3828631a2bcdb4e84689766ee325b6d5d0..1e165dfacf37f4d3f02589f8c126d71310750816 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java
@@ -115,20 +115,25 @@ public final class TimelineManager {
 	TimelineManager(SleuthkitCase caseDB) throws TskCoreException {
 		this.caseDB = caseDB;
 
-		//initialize root and base event types, these are added to the DB in c++ land
-		ROOT_CATEGORY_AND_FILESYSTEM_TYPES.forEach(eventType -> eventTypeIDMap.put(eventType.getTypeID(), eventType));
+		List<TimelineEventType> fullList = new ArrayList<>();
+		fullList.addAll(ROOT_CATEGORY_AND_FILESYSTEM_TYPES);
+		fullList.addAll(PREDEFINED_EVENT_TYPES);
 
-		//initialize the other event types that aren't added in c++
 		caseDB.acquireSingleUserCaseWriteLock();
 		try (final CaseDbConnection con = caseDB.getConnection();
-				final Statement statement = con.createStatement()) {
-			for (TimelineEventType type : PREDEFINED_EVENT_TYPES) {
-				con.executeUpdate(statement,
-						insertOrIgnore(" INTO tsk_event_types(event_type_id, display_name, super_type_id) "
-								+ "VALUES( " + type.getTypeID() + ", '"
-								+ escapeSingleQuotes(type.getDisplayName()) + "',"
-								+ type.getParent().getTypeID()
-								+ ")")); //NON-NLS
+				final PreparedStatement pStatement = con.prepareStatement(
+						insertOrIgnore(" INTO tsk_event_types(event_type_id, display_name, super_type_id) VALUES (?, ?, ?)"),
+						Statement.NO_GENERATED_KEYS)) {
+			for (TimelineEventType type : fullList) {
+				pStatement.setLong(1, type.getTypeID());
+				pStatement.setString(2, escapeSingleQuotes(type.getDisplayName()));
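+				// The root event type reports itself as its own parent; store NULL for its super type instead of a self-reference.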
+				if (type != type.getParent()) {
+					pStatement.setLong(3, type.getParent().getTypeID());
+				} else {
+					pStatement.setNull(3, java.sql.Types.INTEGER);
+				}
+
+				con.executeUpdate(pStatement);
 				eventTypeIDMap.put(type.getTypeID(), type);
 			}
 		} catch (SQLException ex) {
@@ -461,7 +466,7 @@ public Set<Long> getEventIDsForContent(Content content, boolean includeDerivedAr
 	 * @throws TskCoreException
 	 * @throws DuplicateException
 	 */
-	private long addEventDescription(long dataSourceObjId, long fileObjId, Long artifactID,
+	private Long addEventDescription(long dataSourceObjId, long fileObjId, Long artifactID,
 			String fullDescription, String medDescription, String shortDescription,
 			boolean hasHashHits, boolean tagged, CaseDbConnection connection) throws TskCoreException, DuplicateException {
 		String tableValuesClause
@@ -475,7 +480,8 @@ private long addEventDescription(long dataSourceObjId, long fileObjId, Long arti
 		String insertDescriptionSql = getSqlIgnoreConflict(tableValuesClause);
 
 		caseDB.acquireSingleUserCaseWriteLock();
-		try (PreparedStatement insertDescriptionStmt = connection.prepareStatement(insertDescriptionSql, PreparedStatement.RETURN_GENERATED_KEYS)) {
+		try {
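+			// Obtain the prepared statement from the connection, which manages its lifetime, instead of creating and closing a new one on each call.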
+			PreparedStatement insertDescriptionStmt = connection.getPreparedStatement(insertDescriptionSql, PreparedStatement.RETURN_GENERATED_KEYS);
 			insertDescriptionStmt.clearParameters();
 			insertDescriptionStmt.setLong(1, dataSourceObjId);
 			insertDescriptionStmt.setLong(2, fileObjId);
@@ -495,22 +501,14 @@ private long addEventDescription(long dataSourceObjId, long fileObjId, Long arti
 			// if no inserted rows, there is a conflict due to a duplicate event 
 			// description.  If that happens, return null as no id was inserted.
 			if (row < 1) {
-				throw new DuplicateException(String.format(
-						"An event description already exists for [fullDescription: %s, contentId: %d, artifactId: %s]",
-						fullDescription == null ? "<null>" : fullDescription,
-						fileObjId,
-						artifactID == null ? "<null>" : Long.toString(artifactID)));
+				return null;
 			}
 
 			try (ResultSet generatedKeys = insertDescriptionStmt.getGeneratedKeys()) {
 				if (generatedKeys.next()) {
 					return generatedKeys.getLong(1);
 				} else {
-					throw new DuplicateException(String.format(
-							"An event description already exists for [fullDescription: %s, contentId: %d, artifactId: %s]",
-							fullDescription == null ? "<null>" : fullDescription,
-							fileObjId,
-							artifactID == null ? "<null>" : Long.toString(artifactID)));
+					return null;
 				}
 			}
 		} catch (SQLException ex) {
@@ -519,6 +517,45 @@ private long addEventDescription(long dataSourceObjId, long fileObjId, Long arti
 			caseDB.releaseSingleUserCaseWriteLock();
 		}
 	}
+	
+	/**
+	 * Returns an event description id for an existing event.
+	 * 
+	 * @param dataSourceObjId	Existing data source object id
+	 * @param fileObjId			Existing content object id
+	 * @param artifactID		Existing artifact id
+	 * @param fullDescription	Full event description
+	 * @param connection		Database connection
+	 * 
+	 * @return The id of an existing description, or null if none was found.
+	 * 
+	 * @throws TskCoreException 
+	 */
+	private Long getEventDescription(long dataSourceObjId, long fileObjId, Long artifactID,
+			String fullDescription, CaseDbConnection connection) throws TskCoreException {
+
+		String query = "SELECT event_description_id FROM tsk_event_descriptions "
+				+ "WHERE data_source_obj_id = " + dataSourceObjId
+				+ " AND content_obj_id = " + fileObjId
+				+ " AND artifact_id " + (artifactID != null ? " = " + artifactID : "IS null")
+				+ " AND full_description " + (fullDescription != null ? "= '"
+					+ SleuthkitCase.escapeSingleQuotes(fullDescription) + "'" : "IS null");
+
+		caseDB.acquireSingleUserCaseReadLock();
+		try (Statement stmt = connection.createStatement(); ResultSet resultSet = stmt.executeQuery(query)) {
+
+			if (resultSet.next()) {
+				long id = resultSet.getLong(1);
+				return id;
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException(String.format("Failed to get description, dataSource=%d, fileObjId=%d, artifactId=%d", dataSourceObjId, fileObjId, artifactID), ex);
+		} finally {
+			caseDB.releaseSingleUserCaseReadLock();
+		}
+
+		return null;
+	}
 
 	Collection<TimelineEvent> addEventsForNewFile(AbstractFile file, CaseDbConnection connection) throws TskCoreException {
 		Set<TimelineEvent> events = addEventsForNewFileQuiet(file, connection);
@@ -563,27 +600,34 @@ Set<TimelineEvent> addEventsForNewFileQuiet(AbstractFile file, CaseDbConnection
 		Set<TimelineEvent> events = new HashSet<>();
 		caseDB.acquireSingleUserCaseWriteLock();
 		try {
-			long descriptionID = addEventDescription(file.getDataSourceObjectId(), fileObjId, null,
+			Long descriptionID = addEventDescription(file.getDataSourceObjectId(), fileObjId, null,
 					description, null, null, false, false, connection);
-
-			for (Map.Entry<TimelineEventType, Long> timeEntry : timeMap.entrySet()) {
-				Long time = timeEntry.getValue();
-				if (time > 0 && time < MAX_TIMESTAMP_TO_ADD) {// if the time is legitimate ( greater than zero and less then 12 years from current date) insert it
-					TimelineEventType type = timeEntry.getKey();
-					long eventID = addEventWithExistingDescription(time, type, descriptionID, connection);
-
-					/*
-					 * Last two flags indicating hasTags and hasHashHits are
-					 * both set to false with the assumption that this is not
-					 * possible for a new file. See JIRA-5407
-					 */
-					events.add(new TimelineEvent(eventID, descriptionID, fileObjId, null, time, type,
-							description, null, null, false, false));
-				} else {
-					if (time >= MAX_TIMESTAMP_TO_ADD) {
-						logger.log(Level.WARNING, String.format("Date/Time discarded from Timeline for %s for file %s with Id %d", timeEntry.getKey().getDisplayName(), file.getParentPath() + file.getName(), file.getId()));
+			
+			if(descriptionID == null) {
+				descriptionID = getEventDescription(file.getDataSourceObjectId(), fileObjId, null, description, connection);
+			}
+			if(descriptionID != null) {
+				for (Map.Entry<TimelineEventType, Long> timeEntry : timeMap.entrySet()) {
+					Long time = timeEntry.getValue();
+					if (time > 0 && time < MAX_TIMESTAMP_TO_ADD) { // if the time is legitimate (greater than zero and less than 12 years from the current date) insert it
+						TimelineEventType type = timeEntry.getKey();
+						long eventID = addEventWithExistingDescription(time, type, descriptionID, connection);
+
+						/*
+						 * Last two flags indicating hasTags and hasHashHits are
+						 * both set to false with the assumption that this is not
+						 * possible for a new file. See JIRA-5407
+						 */
+						events.add(new TimelineEvent(eventID, descriptionID, fileObjId, null, time, type,
+								description, null, null, false, false));
+					} else {
+						if (time >= MAX_TIMESTAMP_TO_ADD) {
+							logger.log(Level.WARNING, String.format("Date/Time discarded from Timeline for %s for file %s with Id %d", timeEntry.getKey().getDisplayName(), file.getParentPath() + file.getName(), file.getId()));
+						}
 					}
-				}
+				} 
+			} else {
+				throw new TskCoreException(String.format("Failed to get event description for file id = %d", fileObjId));
 			}
 		} catch (DuplicateException dupEx) {
 			logger.log(Level.SEVERE, "Attempt to make file event duplicate.", dupEx);
@@ -774,7 +818,12 @@ private Optional<TimelineEvent> addArtifactEvent(TimelineEventDescriptionWithTim
 		String shortDescription = eventPayload.getDescription(TimelineLevelOfDetail.LOW);
 		long artifactID = artifact.getArtifactID();
 		long fileObjId = artifact.getObjectID();
-		long dataSourceObjectID = artifact.getDataSourceObjectID();
+		Long dataSourceObjectID = artifact.getDataSourceObjectID();
+		
+		if(dataSourceObjectID == null) {
+			logger.log(Level.SEVERE, String.format("Failed to create timeline event for artifact (%d), artifact data source was null", artifact.getId()));
+			return Optional.empty();
+		}
 
 		AbstractFile file = caseDB.getAbstractFileById(fileObjId);
 		boolean hasHashHits = false;
@@ -788,15 +837,24 @@ private Optional<TimelineEvent> addArtifactEvent(TimelineEventDescriptionWithTim
 		caseDB.acquireSingleUserCaseWriteLock();
 		try (CaseDbConnection connection = caseDB.getConnection();) {
 
-			long descriptionID = addEventDescription(dataSourceObjectID, fileObjId, artifactID,
-					fullDescription, medDescription, shortDescription,
-					hasHashHits, tagged, connection);
-
-			long eventID = addEventWithExistingDescription(time, eventType, descriptionID, connection);
-
-			event = new TimelineEvent(eventID, dataSourceObjectID, fileObjId, artifactID,
-					time, eventType, fullDescription, medDescription, shortDescription,
-					hasHashHits, tagged);
+			Long descriptionID = addEventDescription(dataSourceObjectID, fileObjId, artifactID,
+				fullDescription, medDescription, shortDescription,
+				hasHashHits, tagged, connection);
+			
+			if(descriptionID == null) {
+				descriptionID = getEventDescription(dataSourceObjectID, fileObjId, artifactID,
+					fullDescription, connection);
+			} 
+
+			if(descriptionID != null) {
+				long eventID = addEventWithExistingDescription(time, eventType, descriptionID, connection);
+
+				event = new TimelineEvent(eventID, dataSourceObjectID, fileObjId, artifactID,
+						time, eventType, fullDescription, medDescription, shortDescription,
+						hasHashHits, tagged);
+			} else {
+				throw new TskCoreException(String.format("Failed to get event description for file id = %d, artifactId %d", fileObjId, artifactID));
+			}
 
 		} finally {
 			caseDB.releaseSingleUserCaseWriteLock();
@@ -811,7 +869,8 @@ private long addEventWithExistingDescription(Long time, TimelineEventType type,
 		String insertEventSql = getSqlIgnoreConflict(tableValuesClause);
 
 		caseDB.acquireSingleUserCaseWriteLock();
-		try (PreparedStatement insertRowStmt = connection.prepareStatement(insertEventSql, Statement.RETURN_GENERATED_KEYS);) {
+		try {
+			PreparedStatement insertRowStmt = connection.getPreparedStatement(insertEventSql, Statement.RETURN_GENERATED_KEYS);
 			insertRowStmt.clearParameters();
 			insertRowStmt.setLong(1, type.getTypeID());
 			insertRowStmt.setLong(2, descriptionID);
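
The hunks above replace the DuplicateException paths with a null return from addEventDescription plus a follow-up getEventDescription lookup. Below is a minimal, self-contained JDBC sketch of that insert-or-look-up pattern; the table and column names are simplified placeholders rather than the real tsk_event_descriptions schema, and the conflict clause depends on the target database.

```java
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

final class InsertOrGetSketch {

	static long insertOrGetId(Connection conn, long fileObjId, String description) throws SQLException {
		// Insert the description, silently skipping duplicates (SQLite/PostgreSQL style clause).
		String insert = "INSERT INTO descriptions (file_obj_id, full_description) VALUES (?, ?) ON CONFLICT DO NOTHING";
		try (PreparedStatement stmt = conn.prepareStatement(insert, Statement.RETURN_GENERATED_KEYS)) {
			stmt.setLong(1, fileObjId);
			stmt.setString(2, description);
			if (stmt.executeUpdate() > 0) {
				try (ResultSet keys = stmt.getGeneratedKeys()) {
					if (keys.next()) {
						return keys.getLong(1); // a new row was inserted, use its generated key
					}
				}
			}
		}
		// Nothing inserted (duplicate) or no key returned: fall back to looking the row up.
		String select = "SELECT id FROM descriptions WHERE file_obj_id = ? AND full_description = ?";
		try (PreparedStatement stmt = conn.prepareStatement(select)) {
			stmt.setLong(1, fileObjId);
			stmt.setString(2, description);
			try (ResultSet rs = stmt.executeQuery()) {
				if (rs.next()) {
					return rs.getLong(1);
				}
			}
		}
		throw new SQLException("Description neither inserted nor found for file " + fileObjId);
	}
}
```
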
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TskCaseDbBridge.java b/bindings/java/src/org/sleuthkit/datamodel/TskCaseDbBridge.java
index cee51945acc4b4f0b86cc06ab7dcab0e29548c2d..b164210aabd23194fbdeed13d0d54c966270fc9f 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TskCaseDbBridge.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TskCaseDbBridge.java
@@ -18,6 +18,7 @@
  */
 package org.sleuthkit.datamodel;
 
+import com.google.common.base.Strings;
 import java.sql.PreparedStatement;
 import java.sql.SQLException;
 import java.sql.Statement;
@@ -26,12 +27,15 @@
 import java.util.Arrays;
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.Iterator;
 import java.util.LinkedList;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Optional;
 import java.util.Queue;
 import java.util.logging.Level;
 import java.util.logging.Logger;
+import org.sleuthkit.datamodel.OsAccountManager.NotUserSIDException;
 import org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction;
 
 /**
@@ -50,19 +54,23 @@ class TskCaseDbBridge {
     private final SleuthkitCase caseDb;
     private CaseDbTransaction trans = null;
     private final AddDataSourceCallbacks addDataSourceCallbacks;
+	private final Host imageHost;
     
     private final Map<Long, Long> fsIdToRootDir = new HashMap<>();
     private final Map<Long, TskData.TSK_FS_TYPE_ENUM> fsIdToFsType = new HashMap<>();
     private final Map<ParentCacheKey, Long> parentDirCache = new HashMap<>();
     
+    private final Map<String, OsAccount> ownerIdToAccountMap = new HashMap<>();
+	
     private static final long BATCH_FILE_THRESHOLD = 500;
     private final Queue<FileInfo> batchedFiles = new LinkedList<>();
     private final Queue<LayoutRangeInfo> batchedLayoutRanges = new LinkedList<>();
     private final List<Long> layoutFileIds = new ArrayList<>();
     
-    TskCaseDbBridge(SleuthkitCase caseDb, AddDataSourceCallbacks addDataSourceCallbacks) {
+    TskCaseDbBridge(SleuthkitCase caseDb, AddDataSourceCallbacks addDataSourceCallbacks, Host host) {
         this.caseDb = caseDb;
         this.addDataSourceCallbacks = addDataSourceCallbacks;
+		imageHost = host;
         trans = null;
     }
     
@@ -123,6 +131,7 @@ void finish() {
      * @param sha256      SHA256 hash.
      * @param deviceId    Device ID.
      * @param collectionDetails  The collection details.
+     * @param paths       Data source path(s)
      * 
      * @return The object ID of the new image or -1 if an error occurred
      */
@@ -306,6 +315,7 @@ long addFileSystem(long parentObjId, long imgOffset, int fsType, long blockSize,
      * @param seq         The sequence number from fs_file->meta->seq. 
      * @param parMetaAddr The metadata address of the parent
      * @param parSeq      The parent sequence number if NTFS, -1 otherwise.
+	 * @param ownerUid	  String uid of the file owner.  May be an empty string.
      * 
      * @return 0 if successful, -1 if not
      */
@@ -319,7 +329,7 @@ long addFile(long parentObjId,
         long crtime, long ctime, long atime, long mtime,
         int meta_mode, int gid, int uid,
         String escaped_path, String extension, 
-        long seq, long parMetaAddr, long parSeq) {
+        long seq, long parMetaAddr, long parSeq, String ownerUid) {
         
         // Add the new file to the list
         batchedFiles.add(new FileInfo(parentObjId,
@@ -332,7 +342,7 @@ long addFile(long parentObjId,
                 crtime, ctime, atime, mtime,
                 meta_mode, gid, uid,
                 escaped_path, extension,
-                seq, parMetaAddr, parSeq));
+                seq, parMetaAddr, parSeq, ownerUid));
         
         // Add the current files to the database if we've exceeded the threshold or if we
         // have the root folder.
@@ -351,6 +361,43 @@ long addFile(long parentObjId,
     private long addBatchedFilesToDb() {
         List<Long> newObjIds = new ArrayList<>();
         try {
+			
+			// loop through the batch, and make sure owner accounts exist for all the files in the batch.
+			// If not, create accounts.
+			Iterator<FileInfo> it = batchedFiles.iterator();
+
+			while (it.hasNext()) {
+				FileInfo fileInfo = it.next();
+				String ownerUid = fileInfo.ownerUid;
+				if (Strings.isNullOrEmpty(fileInfo.ownerUid) == false)  { 
+					// first check if the owner id is already in the map; if found, continue
+					if (this.ownerIdToAccountMap.containsKey(ownerUid)) {
+						continue;
+					}
+
+					// query the DB to get the owner account
+					try {
+						Optional<OsAccount> ownerAccount = caseDb.getOsAccountManager().getWindowsOsAccount(ownerUid, null, null, imageHost);
+						if (ownerAccount.isPresent()) {
+							// found account - add to map 
+							ownerIdToAccountMap.put(ownerUid, ownerAccount.get());
+						} else {
+							// account not found in the database; create the account and add it to the map
+							// Currently we expect only NTFS systems to provide a windows style SID as owner id.
+							OsAccountManager accountMgr = caseDb.getOsAccountManager();
+							OsAccount newAccount = accountMgr.newWindowsOsAccount(ownerUid, null, null, imageHost, OsAccountRealm.RealmScope.UNKNOWN);
+							accountMgr.newOsAccountInstance(newAccount.getId(), fileInfo.dataSourceObjId, OsAccountInstance.OsAccountInstanceType.LAUNCHED, caseDb.getConnection());
+							ownerIdToAccountMap.put(ownerUid, newAccount);
+						}
+					} catch (NotUserSIDException ex) {
+						// if the owner SID is not a user SID, set the owner account to null
+						ownerIdToAccountMap.put(ownerUid, null);
+					}
+				}
+			}
+			
+			
+					
             beginTransaction();
             FileInfo fileInfo;
             while ((fileInfo = batchedFiles.poll()) != null) {
@@ -361,6 +408,27 @@ private long addBatchedFilesToDb() {
                         computedParentObjId = getParentObjId(fileInfo);
                     }
 
+					Long ownerAccountObjId = OsAccount.NO_ACCOUNT;
+					if (Strings.isNullOrEmpty(fileInfo.ownerUid) == false) { 
+						if (ownerIdToAccountMap.containsKey(fileInfo.ownerUid)) {
+							// for any non user SIDs, the map will have a null for account
+							if (Objects.nonNull(ownerIdToAccountMap.get(fileInfo.ownerUid))) {
+							    ownerAccountObjId = ownerIdToAccountMap.get(fileInfo.ownerUid).getId();
+							}
+						} else {
+							// Error - the map should have an account or a null at this point for the owner SID.
+							throw new TskCoreException(String.format("Failed to add file. Owner account not found for file with parent object ID: %d, name: %s, owner id: %s", fileInfo.parentObjId, fileInfo.name, fileInfo.ownerUid));
+						}
+					}
+					
+					// We've seen a case where the root folder comes in with an undefined meta type.
+					// In that case, we alter the type to TSK_FS_META_TYPE_DIR so it will be cached
+					// properly and will not cause errors later for being an unexpected type.
+					if ((fileInfo.parentObjId == fileInfo.fsObjId)
+							&& (fileInfo.metaType == TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_UNDEF.getValue())) {
+						fileInfo.metaType = TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue();
+					}
+					
                     long objId = addFileToDb(computedParentObjId, 
                         fileInfo.fsObjId, fileInfo.dataSourceObjId,
                         fileInfo.fsType,
@@ -371,7 +439,7 @@ private long addBatchedFilesToDb() {
                         fileInfo.crtime, fileInfo.ctime, fileInfo.atime, fileInfo.mtime,
                         fileInfo.meta_mode, fileInfo.gid, fileInfo.uid,
                         null, TskData.FileKnown.UNKNOWN,
-                        fileInfo.escaped_path, fileInfo.extension, 
+                        fileInfo.escaped_path, fileInfo.extension, fileInfo.ownerUid, ownerAccountObjId,
                         false, trans);
                     if (fileInfo.fsObjId != fileInfo.parentObjId) {
                         // Add new file ID to the list to send to ingest unless it is the root folder
@@ -383,7 +451,7 @@ private long addBatchedFilesToDb() {
                         fsIdToRootDir.put(fileInfo.fsObjId, objId);
                     }
 
-                    // If the file is a directory, cache the object ID
+                    // If the file is a directory, cache the object ID.
                     if ((fileInfo.metaType == TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR.getValue()
                             || (fileInfo.metaType == TskData.TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_VIRT_DIR.getValue()))
                             && (fileInfo.name != null)
@@ -486,7 +554,7 @@ long addLayoutFile(long parentObjId,
                 null, null, null, null,
                 null, null, null,
                 null, TskData.FileKnown.UNKNOWN,
-                null, null, 
+                null, null, null, OsAccount.NO_ACCOUNT,
                 true, trans);
             commitTransaction();
 
@@ -606,9 +674,9 @@ private class ParentCacheKey {
         ParentCacheKey(long fsObjId, long metaAddr, long seqNum, String path) {
             this.fsObjId = fsObjId;
             this.metaAddr = metaAddr;
-            if (fsIdToFsType.containsKey(fsObjId) 
-                    && (fsIdToFsType.get(fsObjId).equals(TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS)
-                        || fsIdToFsType.get(fsObjId).equals(TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS_DETECT))) {
+            if (fsIdToFsType.containsKey(fsObjId) 
+                    && (fsIdToFsType.get(fsObjId).equals(TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS)
+                        || fsIdToFsType.get(fsObjId).equals(TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_NTFS_DETECT))) {
                 this.seqNum = seqNum;
             } else {
                 this.seqNum = 0;
@@ -692,6 +760,7 @@ private class FileInfo {
         long seq;
         long parMetaAddr;
         long parSeq;
+		String ownerUid;
         
         FileInfo(long parentObjId, 
             long fsObjId, long dataSourceObjId,
@@ -703,7 +772,7 @@ private class FileInfo {
             long crtime, long ctime, long atime, long mtime,
             int meta_mode, int gid, int uid,
             String escaped_path, String extension, 
-            long seq, long parMetaAddr, long parSeq) {
+            long seq, long parMetaAddr, long parSeq, String ownerUid) {
             
             this.parentObjId = parentObjId;
             this.fsObjId = fsObjId;
@@ -731,6 +800,7 @@ private class FileInfo {
             this.seq = seq;
             this.parMetaAddr = parMetaAddr;
             this.parSeq = parSeq;
+			this.ownerUid = ownerUid;
         }
     }
 	
@@ -768,6 +838,8 @@ private class FileInfo {
 	 * @param known           The file known status.
 	 * @param escaped_path    The escaped path to the file.
 	 * @param extension       The file extension.
+	 * @param ownerUid        Unique id of the file owner.
+	 * @param ownerAcctObjId  Object id of the owner account.
 	 * @param hasLayout       True if this is a layout file, false otherwise.
 	 * @param transaction     The open transaction.
 	 *
@@ -785,8 +857,8 @@ private long addFileToDb(long parentObjId,
 			Long crtime, Long ctime, Long atime, Long mtime,
 			Integer meta_mode, Integer gid, Integer uid,
 			String md5, TskData.FileKnown known,
-			String escaped_path, String extension,
-			boolean hasLayout, CaseDbTransaction transaction) throws TskCoreException {
+			String escaped_path, String extension, String ownerUid, Long ownerAcctObjId,
+			boolean hasLayout,  CaseDbTransaction transaction) throws TskCoreException {
 
 		try {
 			SleuthkitCase.CaseDbConnection connection = transaction.getConnection();
@@ -794,9 +866,9 @@ private long addFileToDb(long parentObjId,
 			// Insert a row for the local/logical file into the tsk_objects table.
 			// INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
 			long objectId = caseDb.addObject(parentObjId, TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);
-			
-			String fileInsert = "INSERT INTO tsk_files (fs_obj_id, obj_id, data_source_obj_id, type, attr_type, attr_id, name, meta_addr, meta_seq, dir_type, meta_type, dir_flags, meta_flags, size, crtime, ctime, atime, mtime, mode, gid, uid, md5, known, parent_path, extension, has_layout)"
-				+ " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; // NON-NLS
+				
+			String fileInsert = "INSERT INTO tsk_files (fs_obj_id, obj_id, data_source_obj_id, type, attr_type, attr_id, name, meta_addr, meta_seq, dir_type, meta_type, dir_flags, meta_flags, size, crtime, ctime, atime, mtime, mode, gid, uid, md5, known, parent_path, extension, has_layout, owner_uid, os_account_obj_id)"
+				+ " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"; // NON-NLS
 			PreparedStatement preparedStatement = connection.getPreparedStatement(fileInsert, Statement.NO_GENERATED_KEYS);			
 			preparedStatement.clearParameters();
 			
@@ -878,6 +950,15 @@ private long addFileToDb(long parentObjId,
 			} else {
 				preparedStatement.setNull(26, java.sql.Types.INTEGER);
 			}
+			
+			preparedStatement.setString(27, ownerUid); // ownerUid
+			
+			if (ownerAcctObjId != OsAccount.NO_ACCOUNT) {
+				preparedStatement.setLong(28, ownerAcctObjId); // os_account_obj_id
+			} else {
+				preparedStatement.setNull(28, java.sql.Types.BIGINT);
+			}
+			
 			connection.executeUpdate(preparedStatement);
 
 			// If this is not a slack file create the timeline events
@@ -890,7 +971,7 @@ private long addFileToDb(long parentObjId,
 						TskData.TSK_FS_META_TYPE_ENUM.valueOf((short) metaType),
 						TskData.TSK_FS_NAME_FLAG_ENUM.valueOf(dirFlags),
 						(short) metaFlags,
-						size, ctime, crtime, atime, mtime, null, null, null, escaped_path, null, parentObjId, null, null, extension);
+						size, ctime, crtime, atime, mtime, null, null, null, escaped_path, null, parentObjId, null, null, extension, ownerUid, ownerAcctObjId);
 
 				timelineManager.addEventsForNewFileQuiet(derivedFile, connection);
 			}
@@ -913,6 +994,7 @@ private long addFileToDb(long parentObjId,
 	 * @param sha256            SHA256 hash.
 	 * @param deviceId          Device ID.
 	 * @param collectionDetails Collection details.
+	 * @param hostId            The ID of a host already in the database.
 	 * @param transaction       Case DB transaction.
 	 *
 	 * @return The newly added Image object ID.
@@ -948,13 +1030,14 @@ private long addImageToDb(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long
 			connection.executeUpdate(preparedStatement);
 
 			// Add a row to data_source_info
-			String dataSourceInfoSql = "INSERT INTO data_source_info (obj_id, device_id, time_zone, acquisition_details) VALUES (?, ?, ?, ?)"; // NON-NLS
+			String dataSourceInfoSql = "INSERT INTO data_source_info (obj_id, device_id, time_zone, acquisition_details, host_id) VALUES (?, ?, ?, ?, ?)"; // NON-NLS
 			preparedStatement = connection.getPreparedStatement(dataSourceInfoSql, Statement.NO_GENERATED_KEYS);
 			preparedStatement.clearParameters();
 			preparedStatement.setLong(1, newObjId);
 			preparedStatement.setString(2, deviceId);
 			preparedStatement.setString(3, timezone);
 			preparedStatement.setString(4, collectionDetails);
+			preparedStatement.setLong(5, imageHost.getHostId());
 			connection.executeUpdate(preparedStatement);
 
 			return newObjId;
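
The owner-account handling added above resolves each owner SID at most once per batch and caches the result, remembering non-user SIDs as null so they are not retried for every file. The sketch below isolates that caching idea; the lookup and create functions are hypothetical stand-ins for the OsAccountManager calls, not the bridge's actual code.

```java
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;

final class OwnerAccountCache {

	private final Map<String, Long> ownerIdToAccountObjId = new HashMap<>();
	private final Function<String, Optional<Long>> lookup; // stand-in for "query existing account"
	private final Function<String, Long> create;           // stand-in for "create account"; may return null for non-user SIDs

	OwnerAccountCache(Function<String, Optional<Long>> lookup, Function<String, Long> create) {
		this.lookup = lookup;
		this.create = create;
	}

	/** Returns the account object id for the owner uid, or null if it does not map to a user account. */
	Long resolve(String ownerUid) {
		if (ownerUid == null || ownerUid.isEmpty()) {
			return null;
		}
		// containsKey rather than get(): a cached null is a valid answer and must not trigger another lookup.
		if (ownerIdToAccountObjId.containsKey(ownerUid)) {
			return ownerIdToAccountObjId.get(ownerUid);
		}
		Long objId = lookup.apply(ownerUid).orElseGet(() -> create.apply(ownerUid));
		ownerIdToAccountObjId.put(ownerUid, objId);
		return objId;
	}
}
```
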
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TskData.java b/bindings/java/src/org/sleuthkit/datamodel/TskData.java
index 6ff21a0a03cb007cc49d3d6fe5fa3579307ce8c2..805770328239cff06fe6536eecb206862ede928b 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TskData.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TskData.java
@@ -632,9 +632,12 @@ public enum ObjectType {
 		ABSTRACTFILE(4), ///< File - see tsk_files for more details
 		ARTIFACT(5),	/// Artifact - see blackboard_artifacts for more details
 		REPORT(6),	///< Report - see reports for more details
-		POOL(7)	///< Pool
+		POOL(7),	///< Pool
+		OS_ACCOUNT(8), ///< OS Account - see tsk_os_accounts for more details
+		HOST_ADDRESS(9), ///< Host Address - see tsk_host_addresses for more details
+		UNSUPPORTED(-1) ///< Unsupported type
 		; 
-		private short objectType;
+		private final short objectType;
 
 		private ObjectType(int objectType) {
 			this.objectType = (short) objectType;
@@ -662,8 +665,7 @@ public static ObjectType valueOf(short objectType) {
 					return v;
 				}
 			}
-			throw new IllegalArgumentException(
-					MessageFormat.format(bundle.getString("TskData.objectTypeEnum.exception.msg1.text"), objectType));
+			return UNSUPPORTED;
 		}
 	}
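
ObjectType.valueOf() now falls back to UNSUPPORTED instead of throwing, so object rows written by a newer schema no longer abort loading. A trimmed-down, hypothetical enum showing the same lookup-with-fallback pattern:

```java
enum ObjTypeSketch {
	ABSTRACTFILE((short) 4),
	POOL((short) 7),
	OS_ACCOUNT((short) 8),
	UNSUPPORTED((short) -1);

	private final short id;

	ObjTypeSketch(short id) {
		this.id = id;
	}

	static ObjTypeSketch fromId(short id) {
		for (ObjTypeSketch t : values()) {
			if (t.id == id) {
				return t;
			}
		}
		return UNSUPPORTED; // degrade gracefully on ids written by newer versions
	}
}
```
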
 
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TskEvent.java b/bindings/java/src/org/sleuthkit/datamodel/TskEvent.java
new file mode 100755
index 0000000000000000000000000000000000000000..d67686c466ace311eec1d2da324fbdd517d4929f
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/TskEvent.java
@@ -0,0 +1,496 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2020-2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import com.google.common.collect.ImmutableSet;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+
+/**
+ * Data model events.
+ */
+public interface TskEvent {
+
+	/**
+	 * Gets the object ID of the data source associated with the event, if
+	 * applicable.
+	 *
+	 * @return The object ID of the data source associated with the event, if
+	 *         specified.
+	 */
+	default Optional<Long> getDataSourceId() {
+		return Optional.empty();
+	}
+
+	/**
+	 * An abstract super class for data model events for one or more data model
+	 * objects.
+	 *
+	 * @param <T> The type of data model object that is the subject of the
+	 *            event.
+	 */
+	abstract static class TskObjectsEvent<T> implements TskEvent {
+
+		private final List<T> dataModelObjects;
+
+		/**
+		 * Constructs the super class part for data model events for one or more
+		 * data model objects.
+		 *
+		 * @param dataModelObjects The data model objects that are the subjects
+		 *                         of the event.
+		 */
+		TskObjectsEvent(List<T> dataModelObjects) {
+			this.dataModelObjects = new ArrayList<>();
+			this.dataModelObjects.addAll(dataModelObjects);
+		}
+
+		/**
+		 * Gets the data model objects that are the subjects of the event.
+		 *
+		 * @return The data model objects.
+		 */
+		List<T> getDataModelObjects() {
+			return Collections.unmodifiableList(dataModelObjects);
+		}
+
+	}
+
+	/**
+	 * An event published when the aggregate scores of one or more data model
+	 * objects change.
+	 */
+	public final static class AggregateScoresChangedEvent extends TskObjectsEvent<ScoreChange> {
+
+		private final Long dataSourceObjectId;
+
+		/**
+		 * Constructs an event published when the aggregate scores of one or
+		 * more data model objects change.
+		 *
+		 * @param dataSourceObjectId The object ID of the data source of the
+		 *                           score changes, if any.
+		 * @param scoreChanges       The score changes, must not be empty.
+		 */
+		AggregateScoresChangedEvent(Long dataSourceObjectId, ImmutableSet<ScoreChange> scoreChanges) {
+			super(scoreChanges.asList());
+			this.dataSourceObjectId = dataSourceObjectId;
+			scoreChanges.stream().forEach(chg -> {
+				if (!chg.getDataSourceObjectId().equals(dataSourceObjectId)) {
+					throw new IllegalArgumentException("All data source object IDs in List<ScoreChange> must match dataSourceObjectId");
+				}
+			});
+		}
+
+		@Override
+		public Optional<Long> getDataSourceId() {
+			return Optional.ofNullable(dataSourceObjectId);
+		}
+
+		/**
+		 * Gets the score changes.
+		 *
+		 * @return The score changes.
+		 */
+		public List<ScoreChange> getScoreChanges() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An event published when one or more analysis results are deleted.
+	 */
+	public final static class AnalysisResultsDeletedTskEvent extends TskObjectsEvent<Long> {
+
+		/**
+		 * Constructs an event published when one or more analysis results are
+		 * deleted.
+		 *
+		 * @param deletedResultObjIds The TSK object IDs of the deleted analysis
+		 *                            results.
+		 */
+		AnalysisResultsDeletedTskEvent(List<Long> deletedResultObjIds) {
+			super(deletedResultObjIds);
+		}
+
+		/**
+		 * Gets the TSK object IDs of the deleted analysis results.
+		 *
+		 * @return The TSK object IDs.
+		 */
+		public List<Long> getAnalysisResultObjectIds() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An abstract super class for host events.
+	 */
+	abstract static class HostsTskEvent extends TskObjectsEvent<Host> {
+
+		/**
+		 * Constructs the super class part for a host event.
+		 *
+		 * @param hosts The hosts that are the subjects of the event.
+		 */
+		HostsTskEvent(List<Host> hosts) {
+			super(hosts);
+		}
+
+		/**
+		 * Gets the hosts.
+		 *
+		 * @return The hosts.
+		 */
+		public List<Host> getHosts() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An event published when one or more hosts are added.
+	 */
+	public final static class HostsAddedTskEvent extends HostsTskEvent {
+
+		/**
+		 * Constructs an event published when one or more hosts are added.
+		 *
+		 * @param hosts The hosts.
+		 */
+		HostsAddedTskEvent(List<Host> hosts) {
+			super(hosts);
+		}
+
+	}
+
+	/**
+	 * An event published when one or more hosts are updated.
+	 */
+	public final static class HostsUpdatedTskEvent extends HostsTskEvent {
+
+		/**
+		 * Constructs an event published when one or more hosts are updated.
+		 *
+		 * @param hosts The hosts.
+		 */
+		HostsUpdatedTskEvent(List<Host> hosts) {
+			super(hosts);
+		}
+
+	}
+
+	/**
+	 * An event published when one or more hosts are deleted.
+	 */
+	public final static class HostsDeletedTskEvent extends TskObjectsEvent<Long> {
+
+		/**
+		 * Constructs an event published when one or more hosts are deleted.
+		 *
+		 * @param hostIds The host IDs of the deleted hosts.
+		 */
+		HostsDeletedTskEvent(List<Long> hostIds) {
+			super(hostIds);
+		}
+
+		/**
+		 * Gets the host IDs of the deleted hosts.
+		 *
+		 * @return The host IDs.
+		 */
+		public List<Long> getHostIds() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An abstract super class for OS account events.
+	 */
+	abstract static class OsAccountsTskEvent extends TskObjectsEvent<OsAccount> {
+
+		/**
+		 * Constructs the super class part of an OS account event.
+		 *
+		 * @param osAccounts The OS accounts that are the subjects of the event.
+		 */
+		OsAccountsTskEvent(List<OsAccount> osAccounts) {
+			super(osAccounts);
+		}
+
+		/**
+		 * Gets the OS accounts.
+		 *
+		 * @return The OS accounts.
+		 */
+		public List<OsAccount> getOsAcounts() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An event published when one or more OS accounts are added.
+	 */
+	public final static class OsAccountsAddedTskEvent extends OsAccountsTskEvent {
+
+		/**
+		 * Constructs an event published when one or more OS accounts are added.
+		 *
+		 * @param osAccounts The OS accounts.
+		 */
+		OsAccountsAddedTskEvent(List<OsAccount> osAccounts) {
+			super(osAccounts);
+		}
+
+	}
+
+	/**
+	 * An event published when one or more OS accounts are updated.
+	 */
+	public final static class OsAccountsUpdatedTskEvent extends OsAccountsTskEvent {
+
+		/**
+		 * Constructs an event published when OS accounts are updated.
+		 *
+		 * @param osAccounts The OS accounts.
+		 */
+		OsAccountsUpdatedTskEvent(List<OsAccount> osAccounts) {
+			super(osAccounts);
+		}
+
+	}
+
+	/**
+	 * An event published when one or more OS accounts are deleted.
+	 */
+	public final static class OsAccountsDeletedTskEvent extends TskObjectsEvent<Long> {
+
+		/**
+		 * Constructs an event published when one or more OS accounts are
+		 * deleted.
+		 *
+		 * @param accountObjectIds The object IDs of the deleted OS accounts.
+		 */
+		OsAccountsDeletedTskEvent(List<Long> accountObjectIds) {
+			super(accountObjectIds);
+		}
+
+		/**
+		 * Gets the TSK object IDs of the deleted OS accounts.
+		 *
+		 * @return The TSK object IDs.
+		 */
+		public List<Long> getOsAccountObjectIds() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An event published when one or more OS account instances are added.
+	 */
+	public final static class OsAcctInstancesAddedTskEvent extends TskObjectsEvent<OsAccountInstance> {
+
+		/**
+		 * Constructs an event published when one or more OS account instances
+		 * are added.
+		 *
+		 * @param osAcctInstances The OS account instances that are the subjects
+		 *                        of the event.
+		 */
+		OsAcctInstancesAddedTskEvent(List<OsAccountInstance> osAcctInstances) {
+			super(osAcctInstances);
+		}
+
+		/**
+		 * Gets the OS account instances.
+		 *
+		 * @return The OS account instances.
+		 */
+		public List<OsAccountInstance> getOsAccountInstances() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An abstract super class for person events.
+	 */
+	static abstract class PersonsTskEvent extends TskObjectsEvent<Person> {
+
+		/**
+		 * Constructs the super class part of a person event.
+		 *
+		 * @param persons The persons that are the subjects of the event.
+		 */
+		PersonsTskEvent(List<Person> persons) {
+			super(persons);
+		}
+
+		/**
+		 * Gets the persons.
+		 *
+		 * @return The persons.
+		 */
+		public List<Person> getPersons() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An event published when one or more persons are added.
+	 */
+	public final static class PersonsAddedTskEvent extends PersonsTskEvent {
+
+		/**
+		 * Constructs an event published when one or more persons are added.
+		 *
+		 * @param persons The persons.
+		 */
+		PersonsAddedTskEvent(List<Person> persons) {
+			super(persons);
+		}
+
+	}
+
+	/**
+	 * An event published when one or more persons are updated.
+	 */
+	public final static class PersonsUpdatedTskEvent extends PersonsTskEvent {
+
+		/**
+		 * Constructs an event published when one or more persons are updated.
+		 *
+		 * @param persons The persons.
+		 */
+		PersonsUpdatedTskEvent(List<Person> persons) {
+			super(persons);
+		}
+
+	}
+
+	/**
+	 * An event published when one or more persons are deleted.
+	 */
+	public final static class PersonsDeletedTskEvent extends TskObjectsEvent<Long> {
+
+		/**
+		 * Constructs an event published when one or more persons are deleted.
+		 *
+		 * @param personObjectIDs The TSK object IDs of the deleted persons.
+		 */
+		PersonsDeletedTskEvent(List<Long> personObjectIDs) {
+			super(personObjectIDs);
+		}
+
+		/**
+		 * Gets the person IDs of the deleted persons.
+		 *
+		 * @return The person IDs.
+		 */
+		public List<Long> getPersonIds() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An event published when one or more hosts are added to a person.
+	 */
+	public final static class HostsAddedToPersonTskEvent extends TskObjectsEvent<Host> {
+
+		private final Person person;
+
+		/**
+		 * Constructs an event published when one or more hosts are added to a
+		 * person.
+		 *
+		 * @param person The person that is the subject of the event.
+		 * @param hosts  The hosts that are the subjects of the event.
+		 */
+		HostsAddedToPersonTskEvent(Person person, List<Host> hosts) {
+			super(hosts);
+			this.person = person;
+		}
+
+		/**
+		 * Gets the person.
+		 *
+		 * @return The person.
+		 */
+		public Person getPerson() {
+			return person;
+		}
+
+		/**
+		 * Gets the hosts.
+		 *
+		 * @return The hosts.
+		 */
+		public List<Host> getHosts() {
+			return getDataModelObjects();
+		}
+
+	}
+
+	/**
+	 * An event published when one or more hosts are removed from a person.
+	 */
+	public final static class HostsRemovedFromPersonTskEvent extends TskObjectsEvent<Long> {
+
+		private final Person person;
+
+		/**
+		 * Constructs an event published when one or more hosts are removed from
+		 * a person.
+		 *
+		 * @param person  The person.
+		 * @param hostIds The host IDs of the hosts.
+		 */
+		HostsRemovedFromPersonTskEvent(Person person, List<Long> hostIds) {
+			super(hostIds);
+			this.person = person;
+		}
+
+		/**
+		 * Gets the person.
+		 *
+		 * @return The person.
+		 */
+		public Person getPerson() {
+			return person;
+		}
+
+		/**
+		 * Gets the host IDs of the hosts removed from the person.
+		 *
+		 * @return The host IDs.
+		 */
+		public List<Long> getHostIds() {
+			return getDataModelObjects();
+		}
+
+	}
+
+}
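
TskEvent and its nested classes are new public API. The handler below is a sketch of one plausible consumer; how events are delivered to it (listener registration, an application-level event bus, etc.) is outside this diff and assumed.

```java
import java.util.List;
import org.sleuthkit.datamodel.Host;
import org.sleuthkit.datamodel.TskEvent;

final class TskEventHandlerSketch {

	void handle(TskEvent event) {
		if (event instanceof TskEvent.HostsAddedTskEvent) {
			List<Host> hosts = ((TskEvent.HostsAddedTskEvent) event).getHosts();
			System.out.println("hosts added: " + hosts.size());
		} else if (event instanceof TskEvent.OsAccountsDeletedTskEvent) {
			List<Long> ids = ((TskEvent.OsAccountsDeletedTskEvent) event).getOsAccountObjectIds();
			System.out.println("OS accounts deleted: " + ids);
		}
		// A data source id is present only when one applies to every subject of the event.
		event.getDataSourceId().ifPresent(id -> System.out.println("data source: " + id));
	}
}
```
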
diff --git a/bindings/java/src/org/sleuthkit/datamodel/UnsupportedContent.java b/bindings/java/src/org/sleuthkit/datamodel/UnsupportedContent.java
new file mode 100644
index 0000000000000000000000000000000000000000..d9bbca1bdd7b59ada02ad29b51d762789b4ceedd
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/UnsupportedContent.java
@@ -0,0 +1,63 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+/**
+ * This content type is used as a default when the object type from the
+ * tsk_objects table is not present in the TskData.ObjectType enum. This should
+ * only come into play when loading case databases created by a newer version of
+ * Autopsy.
+ */
+public class UnsupportedContent extends AbstractContent {
+
+	/**
+	 * Create an UnsupportedContent object. Only store the object id.
+	 *
+	 * @param db     case database handle
+	 * @param obj_id object id
+	 */
+	protected UnsupportedContent(SleuthkitCase db, long obj_id) {
+		super(db, obj_id, "Unsupported Content");
+	}
+
+	@Override
+	public int read(byte[] buf, long offset, long len) throws TskCoreException {
+		return 0;
+	}
+
+	@Override
+	public void close() {
+		// Do nothing
+	}
+
+	@Override
+	public long getSize() {
+		return 0;
+	}
+
+	@Override
+	public <T> T accept(ContentVisitor<T> v) {
+		return v.visit(this);
+	}
+
+	@Override
+	public <T> T accept(SleuthkitItemVisitor<T> v) {
+		return v.visit(this);
+	}
+}
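
Code walking an object tree can treat UnsupportedContent as a placeholder and skip it rather than fail. The loop below is an illustrative sketch; the process consumer is a hypothetical caller-supplied handler.

```java
import java.util.List;
import java.util.function.Consumer;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.UnsupportedContent;

final class SkipUnsupportedSketch {

	static void walk(Content parent, Consumer<Content> process) throws TskCoreException {
		List<Content> children = parent.getChildren();
		for (Content child : children) {
			if (child instanceof UnsupportedContent) {
				continue; // object type written by a newer schema; nothing useful to read
			}
			process.accept(child);
		}
	}
}
```
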
diff --git a/bindings/java/src/org/sleuthkit/datamodel/WindowsAccountUtils.java b/bindings/java/src/org/sleuthkit/datamodel/WindowsAccountUtils.java
new file mode 100644
index 0000000000000000000000000000000000000000..0f1a9d37e8824b86426c4afb6377a0ba695e010b
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/WindowsAccountUtils.java
@@ -0,0 +1,232 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import com.google.common.collect.ImmutableSet;
+import java.util.Set;
+
+/**
+ * A utility class for handling Windows specific accounts and SIDs.
+ * 
+ */
+final class WindowsAccountUtils {
+	
+	// Special Windows accounts with short SIDs are given a special realm "address".
+	final static String SPECIAL_WINDOWS_REALM_ADDR = "SPECIAL_WINDOWS_ACCOUNTS";
+	
+	final static String SPECIAL_WINDOWS_BACK_UP_POSTFIX = ".bak";
+	
+	
+	// Windows uses SIDs for groups as well as users. 
+	// We don't want to create "User" accounts for group SIDs.
+	// The lists here help us identify and weed out group SIDs when creating accounts.
+	private static final Set<String> GROUP_SIDS = ImmutableSet.of(
+			"S-1-0-0",	// Null SID
+			"S-1-1-0",	// Everyone
+			"S-1-2-0",	// Local - anyone who has logged on locally
+			"S-1-2-1",	// Console Logon
+			
+			"S-1-3-1",	// Creator
+			"S-1-3-4",	// Owner rights
+			
+			"S-1-5-1",	// Dialup
+			"S-1-5-2",	// Network
+			"S-1-5-3",	// Batch
+			"S-1-5-4",	// Interactive
+			"S-1-5-6",	// Service
+			"S-1-5-7",	// Anonymous
+			"S-1-5-9",	// Enterprise Domain Controllers
+			
+			"S-1-5-11",	// Authenticated Users
+			"S-1-5-12",	// Restricted Code - not a group but not a user SID either
+			"S-1-5-13",	// Terminal Server Users
+			"S-1-5-14",	// Remote Interactive Logon
+			
+			"S-1-5-15",	// This Organization
+			
+			"S-1-5-80-0",	// All Services
+			"S-1-5-83-0",	// NT Virtual Machine\Virtual Machines
+			"S-1-5-90-0"	// Windows Manager\Windows Manager Group
+				
+	);
+	
+	// Any SIDs with the following prefixes are group SID and should be excluded.
+	private static final Set<String> GROUP_SID_PREFIX = ImmutableSet.of(
+			"S-1-5-32"		// Builtin
+			
+	);
+	
+	// SIDs that begin with the domain SID prefix and end with one of these suffixes are domain group SIDs.
+	private static final String DOMAIN_SID_PREFIX = "S-1-5";	
+	private static final Set<String> DOMAIN_GROUP_SID_SUFFIX = ImmutableSet.of(
+			"-512",		// Domain Admins
+			"-513",		// Domain Users
+			
+			"-514",		// Domain Guests
+			"-515",		// Domain Computers	
+			"-516",		// Domain Controllers
+			"-517",		// Cert Publishers
+			
+			"-518",		// Schema Admins
+			"-519",		// Enterprise Admins
+			"-520",		// Group Policy Creator Owners
+			
+			"-526",		// Key Admins
+			"-527",		// Enterprise Key Admins
+			
+			"-533",		// RAS and IAS Servers
+			
+			// Windows 2008 and later
+			"-498",		// Enterprise Read-only Domain Controllers
+			"-521",		// Read-only Domain Controllers
+			"-571",		// Allowed RODC Password Replication Group
+			"-572",		// Denied RODC Password Replication Group
+			
+			// Windows 2012 and later
+			"-522"		// Cloneable Domain Controllers
+	);
+	
+	
+	
+	// Some Windows SIDs indicate special accounts.
+	// These should be handled differently from regular user accounts.
+	private static final Set<String> SPECIAL_SIDS = ImmutableSet.of(
+			"S-1-5-18",	// LOCAL_SYSTEM_ACCOUNT
+			"S-1-5-19", // LOCAL_SERVICE_ACCOUNT
+			"S-1-5-20" // NETWORK_SERVICE_ACCOUNT
+	);
+	private static final Set<String> SPECIAL_SID_PREFIXES = ImmutableSet.of(
+			"S-1-5-80",	// Virtual Service accounts
+			"S-1-5-82", // AppPoolIdentity Virtual accounts. 
+			"S-1-5-83", // Virtual Machine  Virtual Accounts.
+			"S-1-5-90", // Windows Manager Virtual Accounts. 
+			"S-1-5-96" // Font Drive Host Virtual Accounts.
+	);
+	
+	
+	/**
+	 * Checks if the given SID is a special Windows SID.
+	 * 
+	 * @param sid SID to check.
+	 * 
+	 * @return True if the SID is a Windows special SID, false otherwise 
+	 */
+	static boolean isWindowsSpecialSid(String sid) {
+		String tempSID = stripWindowsBackupPostfix(sid);
+		
+		if (SPECIAL_SIDS.contains(tempSID)) {
+			return true;
+		}
+		for (String specialPrefix: SPECIAL_SID_PREFIXES) {
+			if (tempSID.startsWith(specialPrefix)) {
+				return true;
+			}
+		}
+		return false;
+	}
+	
+	
+	/**
+	 * Checks if the given SID is a user SID.
+	 * 
+	 * If the given SID is not found among the known group SIDs, it is considered a user SID.
+	 * 
+	 * @param sid SID to check.
+	 * 
+	 * @return True if the SID is a user SID, false otherwise 
+	 */
+	static boolean isWindowsUserSid(String sid) {
+		
+		String tempSID = stripWindowsBackupPostfix(sid);
+		
+		if (GROUP_SIDS.contains(tempSID)) {
+			return false;
+		}
+		
+		for (String prefix: GROUP_SID_PREFIX) {
+			if (tempSID.startsWith(prefix)) {
+				return false;
+			}
+		}
+		
+		// check for domain groups - they have a domain-specific identifier but a fixed prefix and suffix
+		if (tempSID.startsWith(DOMAIN_SID_PREFIX)) {
+			for (String suffix : DOMAIN_GROUP_SID_SUFFIX) {
+				if (tempSID.endsWith(suffix)) {
+					return false;
+				}
+			}
+		}
+		
+		return true;
+		
+	}
+	
+	/**
+	 * Get the windows realm address from the given SID.
+	 * 
+	 * For all regular account SIDs, the realm address is the sub-authority SID.
+	 * For special Windows accounts the realm address is the special address
+	 * SPECIAL_WINDOWS_REALM_ADDR {@link WindowsAccountUtils#SPECIAL_WINDOWS_REALM_ADDR}.
+	 * 
+	 * @param sid SID
+	 * 
+	 * @return Realm address for the SID.
+	 * 
+	 * @throws TskCoreException If the given SID is not a valid host/domain SID.
+	 */
+	public static String getWindowsRealmAddress(String sid) throws TskCoreException {
+		
+		String realmAddr;
+		String tempSID = stripWindowsBackupPostfix(sid);
+		
+		// When copying realms into portable cases, the SID may already be set to the special windows string.
+		if (isWindowsSpecialSid(tempSID) || tempSID.equals(SPECIAL_WINDOWS_REALM_ADDR)) {
+			realmAddr = SPECIAL_WINDOWS_REALM_ADDR;
+		} else {
+			// regular SIDs should have at least 5 components: S-1-x-y-z
+			if (org.apache.commons.lang3.StringUtils.countMatches(tempSID, "-") < 4) {
+				throw new TskCoreException(String.format("Invalid SID %s for a host/domain", tempSID));
+			}
+			// get the sub authority SID
+			realmAddr = sid.substring(0, tempSID.lastIndexOf('-'));
+		}
+
+		return realmAddr;
+	}
+	
+	/**
+	 * A backup Windows SID includes the postfix .bak at the end of the SID.
+	 * Remove the postfix for easier processing.
+	 * 
+	 * @param sid SID to process.
+	 * 
+	 * @return The sid with the postfix removed.
+	 */
+	private static String stripWindowsBackupPostfix(String sid) {
+		String tempSID = sid;
+		
+		if(tempSID.endsWith(SPECIAL_WINDOWS_BACK_UP_POSTFIX)) {
+			tempSID = tempSID.replace(SPECIAL_WINDOWS_BACK_UP_POSTFIX, "");
+		}
+		
+		return tempSID;
+	}
+	
+}
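
Illustrative expectations for the SID helpers above, derived from the tables in the class itself. WindowsAccountUtils is package-private, so a check like this would have to live in org.sleuthkit.datamodel; it is a throwaway sketch, not part of the library.

```java
package org.sleuthkit.datamodel; // required: WindowsAccountUtils is package-private

final class WindowsAccountUtilsSketch {

	public static void main(String[] args) throws TskCoreException {
		// A regular domain user SID is treated as a user account.
		System.out.println(WindowsAccountUtils.isWindowsUserSid("S-1-5-21-111-222-333-1001")); // true
		// Builtin\Administrators carries the S-1-5-32 group prefix and is filtered out.
		System.out.println(WindowsAccountUtils.isWindowsUserSid("S-1-5-32-544"));              // false
		// LOCAL_SYSTEM is one of the special Windows accounts.
		System.out.println(WindowsAccountUtils.isWindowsSpecialSid("S-1-5-18"));               // true
		// The realm address is the SID with the relative identifier (RID) removed.
		System.out.println(WindowsAccountUtils.getWindowsRealmAddress("S-1-5-21-111-222-333-1001")); // S-1-5-21-111-222-333
	}
}
```
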
diff --git a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactsHelper.java b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactsHelper.java
index 26dfea72b9e22d44c922df678e53c20108f76980..dbd5eee7fb219d3f87d5f7db1e8c05c1e9dd4b18 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactsHelper.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactsHelper.java
@@ -21,10 +21,14 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.Optional;
+import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
 import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.OsAccount;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
 
@@ -33,7 +37,8 @@
  *
  */
 public final class ArtifactsHelper extends ArtifactHelperBase {
-
+	private static final BlackboardArtifact.Type INSTALLED_PROG_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_INSTALLED_PROG);
+	
 	/**
 	 * Creates an artifact helper for modules to create artifacts.
 	 *
@@ -79,20 +84,19 @@ public BlackboardArtifact addInstalledProgram(String programName, long dateInsta
 	public BlackboardArtifact addInstalledProgram(String programName, long dateInstalled,
 			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
 
-		BlackboardArtifact installedProgramArtifact;
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
-
-		// create artifact
-		installedProgramArtifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INSTALLED_PROG);
-
+		
 		// construct attributes 
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getModuleName(), programName));
 		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, dateInstalled, attributes);
 
 		// add the attributes 
 		attributes.addAll(otherAttributesList);
-		installedProgramArtifact.addAttributes(attributes);
 
+		// create artifact
+		Content content = getContent();
+		BlackboardArtifact installedProgramArtifact = content.newDataArtifact(INSTALLED_PROG_TYPE, attributes);
+		
 		// post artifact 
 		getSleuthkitCase().getBlackboard().postArtifact(installedProgramArtifact, getModuleName());
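
ArtifactsHelper (and the communication helper below) now assemble the full attribute list first and create the artifact with Content.newDataArtifact(), so an artifact never exists in the case database without its attributes. A hedged sketch of that flow; the case, content, module name and program name are assumed to be supplied by the caller.

```java
import java.util.ArrayList;
import java.util.Collection;
import org.sleuthkit.datamodel.Blackboard.BlackboardException;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.Content;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.TskCoreException;

final class DataArtifactSketch {

	static BlackboardArtifact addInstalledProgram(SleuthkitCase caseDb, Content content,
			String moduleName, String programName) throws TskCoreException, BlackboardException {
		// Collect every attribute before the artifact exists.
		Collection<BlackboardAttribute> attributes = new ArrayList<>();
		attributes.add(new BlackboardAttribute(
				BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, moduleName, programName));

		// Create the data artifact and its attributes in a single call.
		BlackboardArtifact artifact = content.newDataArtifact(
				new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INSTALLED_PROG), attributes);

		// Post it so listeners (e.g., ingest modules) are notified.
		caseDb.getBlackboard().postArtifact(artifact, moduleName);
		return artifact;
	}
}
```
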
 
diff --git a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/CommunicationArtifactsHelper.java b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/CommunicationArtifactsHelper.java
index 9343f57021fc4411316cb5aff4a72e4461452e6f..374754febb7521522d42dbac337b14b9b4a75a73 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/CommunicationArtifactsHelper.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/CommunicationArtifactsHelper.java
@@ -23,6 +23,7 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Optional;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.apache.commons.lang3.StringUtils;
@@ -37,6 +38,7 @@
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.InvalidAccountIDException;
+import org.sleuthkit.datamodel.OsAccount;
 import org.sleuthkit.datamodel.Relationship;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
@@ -68,6 +70,12 @@
 public final class CommunicationArtifactsHelper extends ArtifactHelperBase {
 
 	private static final Logger LOGGER = Logger.getLogger(CommunicationArtifactsHelper.class.getName());
+
+	private static final BlackboardArtifact.Type CONTACT_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_CONTACT);
+	private static final BlackboardArtifact.Type MESSAGE_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_MESSAGE);
+	private static final BlackboardArtifact.Type CALLOG_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_CALLLOG);
+	private static final BlackboardArtifact.Type ASSOCIATED_OBJ_TYPE = new BlackboardArtifact.Type(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT);
+
 	/**
 	 * Enum for message read status
 	 */
@@ -125,14 +133,13 @@ public String getDisplayName() {
 			return typeStr;
 		}
 	}
-	
-	private static final BlackboardAttribute.Type ATTACHMENTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ATTACHMENTS);
 
+	private static final BlackboardAttribute.Type ATTACHMENTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ATTACHMENTS);
 
 	// 'self' account for the application being processed by the module. 
 	private final Account.Type selfAccountType;
 	private final String selfAccountId;
-			
+
 	private AccountFileInstance selfAccountInstance = null;
 
 	// Type of accounts to be created for the module using this helper.
@@ -276,12 +283,9 @@ public BlackboardArtifact addContact(String contactName,
 		BlackboardArtifact contactArtifact;
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
-		// create TSK_CONTACT artifact
-		contactArtifact = getContent().newArtifact(ARTIFACT_TYPE.TSK_CONTACT);
-
-		// construct attributes
+		// create TSK_CONTACT artifact and construct attributes
 		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_NAME, contactName, attributes);
-		
+
 		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, phoneNumber, attributes);
 		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME, homePhoneNumber, attributes);
 		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE, mobilePhoneNumber, attributes);
@@ -289,7 +293,9 @@ public BlackboardArtifact addContact(String contactName,
 
 		// add attributes
 		attributes.addAll(additionalAttributes);
-		contactArtifact.addAttributes(attributes);
+		Content content = getContent();
+
+		contactArtifact = content.newDataArtifact(CONTACT_TYPE, attributes);
 
 		// create an account for each specified contact method, and a relationship with self account
 		createContactMethodAccountAndRelationship(Account.Type.PHONE, phoneNumber, contactArtifact, 0);
@@ -318,7 +324,8 @@ public BlackboardArtifact addContact(String contactName,
 
 	/**
 	 * Creates a contact's account instance of specified account type, if the
-	 * account id is not null/empty and is a valid account id for the account type.
+	 * account id is not null/empty and is a valid account id for the account
+	 * type.
 	 *
 	 * Also creates a CONTACT relationship between the self account and the new
 	 * contact account.
@@ -341,8 +348,7 @@ private void createContactMethodAccountAndRelationship(Account.Type accountType,
 					throw new TskCoreException(String.format("Failed to create relationship between account = %s and account = %s.",
 							getSelfAccountInstance().getAccount(), contactAccountInstance.getAccount()), ex);
 				}
-			}
-			catch (InvalidAccountIDException ex) {
+			} catch (InvalidAccountIDException ex) {
 				LOGGER.log(Level.WARNING, String.format("Failed to create account with id %s", accountUniqueID));
 			}
 		}
@@ -503,12 +509,8 @@ public BlackboardArtifact addMessage(String messageType,
 			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
 
 		// Created message artifact.  
-		BlackboardArtifact msgArtifact;
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
-		// create TSK_MESSAGE artifact
-		msgArtifact = getContent().newArtifact(ARTIFACT_TYPE.TSK_MESSAGE);
-
 		// construct attributes
 		attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE, getModuleName(), messageType));
 		addAttributeIfNotZero(ATTRIBUTE_TYPE.TSK_DATETIME, dateTime, attributes);
@@ -517,13 +519,13 @@ public BlackboardArtifact addMessage(String messageType,
 		addCommDirectionIfKnown(direction, attributes);
 
 		// Get the self account instance
-		AccountFileInstance selfAccountInstanceLocal  = null; 
+		AccountFileInstance selfAccountInstanceLocal = null;
 		try {
 			selfAccountInstanceLocal = getSelfAccountInstance();
-		}  catch (InvalidAccountIDException ex) {
+		} catch (InvalidAccountIDException ex) {
 			LOGGER.log(Level.WARNING, String.format("Failed to get/create self account with id %s", selfAccountId), ex);
 		}
-		
+
 		// set sender attribute and create sender account
 		AccountFileInstance senderAccountInstance = null;
 		if (StringUtils.isNotBlank(senderId)) {
@@ -533,7 +535,6 @@ public BlackboardArtifact addMessage(String messageType,
 				LOGGER.log(Level.WARNING, String.format("Invalid account identifier %s", senderId));
 			}
 		}
-		
 
 		// set recipient attribute and create recipient accounts
 		List<AccountFileInstance> recipientAccountsList = new ArrayList<>();
@@ -557,39 +558,38 @@ public BlackboardArtifact addMessage(String messageType,
 				// if no sender, selfAccount substitutes caller.
 				if (StringUtils.isEmpty(senderId) && selfAccountInstanceLocal != null) {
 					senderAccountInstance = selfAccountInstanceLocal;
-				}	
+				}
 				// sender becomes PHONE_FROM
 				if (senderAccountInstance != null) {
 					addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, senderAccountInstance.getAccount().getTypeSpecificID(), attributes);
-				}	
+				}
 				// recipient becomes PHONE_TO
 				addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, recipientsStr, attributes);
 				break;
-				
+
 			case INCOMING:
 				// if no recipeint specified, selfAccount substitutes recipient
-				if (isEffectivelyEmpty(recipientIdsList) && selfAccountInstanceLocal != null ) {
+				if (isEffectivelyEmpty(recipientIdsList) && selfAccountInstanceLocal != null) {
 					recipientsStr = selfAccountInstanceLocal.getAccount().getTypeSpecificID();
 					recipientAccountsList.add(selfAccountInstanceLocal);
-				}	
+				}
 				// caller becomes PHONE_FROM
 				if (senderAccountInstance != null) {
 					addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, senderAccountInstance.getAccount().getTypeSpecificID(), attributes);
-				}	
+				}
 				// callee becomes PHONE_TO
 				addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, recipientsStr, attributes);
 				break;
 			default:  // direction UNKNOWN
-				if (StringUtils.isEmpty(senderId) && selfAccountInstanceLocal != null ) {
+				if (StringUtils.isEmpty(senderId) && selfAccountInstanceLocal != null) {
 					// if no sender, selfAccount substitutes caller.
 					senderAccountInstance = selfAccountInstanceLocal;
-				}
-				else if (isEffectivelyEmpty(recipientIdsList) && selfAccountInstanceLocal != null) {
+				} else if (isEffectivelyEmpty(recipientIdsList) && selfAccountInstanceLocal != null) {
 					// else if no recipient specified, selfAccount substitutes recipient
 					recipientsStr = selfAccountInstanceLocal.getAccount().getTypeSpecificID();
 					recipientAccountsList.add(selfAccountInstanceLocal);
-				}	
-				
+				}
+
 				// save phone numbers in direction agnostic attributes
 				if (senderAccountInstance != null) {
 					addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, senderAccountInstance.getAccount().getTypeSpecificID(), attributes);
@@ -597,14 +597,17 @@ else if (isEffectivelyEmpty(recipientIdsList) && selfAccountInstanceLocal != nul
 				addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, recipientsStr, attributes);
 				break;
 		}
-		
+
 		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_SUBJECT, subject, attributes);
 		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_TEXT, messageText, attributes);
 		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_THREAD_ID, threadId, attributes);
 
 		// add attributes to artifact
 		attributes.addAll(otherAttributesList);
-		msgArtifact.addAttributes(attributes);
+
+		// create TSK_MESSAGE artifact
+		Content content = getContent();
+		BlackboardArtifact msgArtifact = content.newDataArtifact(MESSAGE_TYPE, attributes);
 
 		// create sender/recipient relationships  
 		try {
@@ -758,18 +761,14 @@ public BlackboardArtifact addCalllog(CommunicationDirection direction,
 			throw new IllegalArgumentException("Either a caller id, or at least one callee id must be provided for a call log.");
 		}
 
-		AccountFileInstance selfAccountInstanceLocal  = null; 
+		AccountFileInstance selfAccountInstanceLocal = null;
 		try {
 			selfAccountInstanceLocal = getSelfAccountInstance();
-		}  catch (InvalidAccountIDException ex) {
+		} catch (InvalidAccountIDException ex) {
 			LOGGER.log(Level.WARNING, String.format("Failed to get/create self account with id %s", selfAccountId), ex);
 		}
-		
-		BlackboardArtifact callLogArtifact;
-		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
-		// Create TSK_CALLLOG artifact
-		callLogArtifact = getContent().newArtifact(ARTIFACT_TYPE.TSK_CALLLOG);
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
 		// Add basic attributes 
 		addAttributeIfNotZero(ATTRIBUTE_TYPE.TSK_DATETIME_START, startDateTime, attributes);
@@ -784,7 +783,7 @@ public BlackboardArtifact addCalllog(CommunicationDirection direction,
 				LOGGER.log(Level.WARNING, String.format("Failed to create account with id %s", callerId));
 			}
 		}
-		
+
 		// Create a comma separated string of callee
 		List<AccountFileInstance> recipientAccountsList = new ArrayList<>();
 		String calleesStr = "";
@@ -792,53 +791,52 @@ public BlackboardArtifact addCalllog(CommunicationDirection direction,
 			calleesStr = addressListToString(calleeIdsList);
 			for (String callee : calleeIdsList) {
 				if (StringUtils.isNotBlank(callee)) {
-					try{
+					try {
 						recipientAccountsList.add(createAccountInstance(moduleAccountsType, callee));
-					}
-					catch (InvalidAccountIDException ex) {
+					} catch (InvalidAccountIDException ex) {
 						LOGGER.log(Level.WARNING, String.format("Failed to create account with id %s", callerId));
 					}
 				}
 			}
 		}
-		
+
 		switch (direction) {
 			case OUTGOING:
 				// if no callee throw IllegalArg
 				if (isEffectivelyEmpty(calleeIdsList)) {
 					throw new IllegalArgumentException("Callee not provided for an outgoing call.");
-				}	
+				}
 				// if no caller, selfAccount substitutes caller.
-				if (StringUtils.isEmpty(callerId) && selfAccountInstanceLocal != null ) {
+				if (StringUtils.isEmpty(callerId) && selfAccountInstanceLocal != null) {
 					callerAccountInstance = selfAccountInstanceLocal;
-				}	
+				}
 				// caller becomes PHONE_FROM
 				if (callerAccountInstance != null) {
 					addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, callerAccountInstance.getAccount().getTypeSpecificID(), attributes);
-				}	
+				}
 				// callee becomes PHONE_TO
 				addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, calleesStr, attributes);
 				break;
-				
+
 			case INCOMING:
 				// if no caller throw IllegalArg
 				if (StringUtils.isEmpty(callerId)) {
 					throw new IllegalArgumentException("Caller Id not provided for incoming call.");
-				}	
+				}
 				// if no callee specified, selfAccount substitutes callee
 				if (isEffectivelyEmpty(calleeIdsList) && selfAccountInstanceLocal != null) {
 					calleesStr = selfAccountInstanceLocal.getAccount().getTypeSpecificID();
 					recipientAccountsList.add(selfAccountInstanceLocal);
-				}	
+				}
 				// caller becomes PHONE_FROM
 				if (callerAccountInstance != null) {
 					addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, callerAccountInstance.getAccount().getTypeSpecificID(), attributes);
-				}	
+				}
 				// callee becomes PHONE_TO
 				addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, calleesStr, attributes);
 				break;
 			default:  // direction UNKNOWN
-				
+
 				// save phone numbers in direction agnostic attributes
 				if (callerAccountInstance != null) {
 					addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, callerAccountInstance.getAccount().getTypeSpecificID(), attributes);
@@ -849,6 +847,10 @@ public BlackboardArtifact addCalllog(CommunicationDirection direction,
 
 		// add attributes to artifact
 		attributes.addAll(otherAttributesList);
-		callLogArtifact.addAttributes(attributes);
+		// Create TSK_CALLLOG artifact
+		Content content = getContent();
+		BlackboardArtifact callLogArtifact = content.newDataArtifact(CALLOG_TYPE, attributes);
+
 
 		// create relationships between caller/callees
@@ -857,7 +859,7 @@ public BlackboardArtifact addCalllog(CommunicationDirection direction,
 					recipientAccountsList, callLogArtifact, Relationship.Type.CALL_LOG, startDateTime);
 		} catch (TskDataException ex) {
 			throw new TskCoreException(String.format("Failed to create Call log relationships between caller account = %s and callees = %s.",
-					(callerAccountInstance!= null) ? callerAccountInstance.getAccount() : "", calleesStr), ex);
+					(callerAccountInstance != null) ? callerAccountInstance.getAccount() : "", calleesStr), ex);
 		}
 
 		// post artifact 
@@ -866,7 +868,6 @@ public BlackboardArtifact addCalllog(CommunicationDirection direction,
 		// return the artifact
 		return callLogArtifact;
 	}
-	
 
 	/**
 	 * Adds attachments to a message.
@@ -908,11 +909,9 @@ private BlackboardArtifact associateAttachmentWithMessage(BlackboardArtifact mes
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ASSOCIATED_ARTIFACT, this.getModuleName(), message.getArtifactID()));
 
-		BlackboardArtifact bba = attachedFile.newArtifact(ARTIFACT_TYPE.TSK_ASSOCIATED_OBJECT);
-		bba.addAttributes(attributes); //write out to bb
-		return bba;
+		return attachedFile.newDataArtifact(ASSOCIATED_OBJ_TYPE, attributes);
 	}
-	
+
 	/**
 	 * Converts a list of ids into a single comma separated string.
 	 */
@@ -976,10 +975,12 @@ private void addMessageReadStatusIfKnown(MessageReadStatus readStatus, Collectio
 	}
 
 	/**
-	 * Returns self account instance.  Lazily creates it if one doesn't exist yet.
-	 * 
+	 * Returns self account instance. Lazily creates it if one doesn't exist
+	 * yet.
+	 *
 	 * @return Self account instance.
-	 * @throws TskCoreException 
+	 *
+	 * @throws TskCoreException
 	 */
 	private synchronized AccountFileInstance getSelfAccountInstance() throws TskCoreException, InvalidAccountIDException {
 		if (selfAccountInstance == null) {
diff --git a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/GeoArtifactsHelper.java b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/GeoArtifactsHelper.java
index b0cf6e64c4d151336013ee3a65ccaf28ad335283..4e51e3681cce21334c23aca8f1942fb370e4afc2 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/GeoArtifactsHelper.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/GeoArtifactsHelper.java
@@ -20,6 +20,7 @@
 
 import java.util.ArrayList;
 import java.util.List;
+import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardAttribute;
@@ -40,6 +41,11 @@ public final class GeoArtifactsHelper extends ArtifactHelperBase {
 	private static final BlackboardAttribute.Type WAYPOINTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_WAYPOINTS);
 	private static final BlackboardAttribute.Type TRACKPOINTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_TRACKPOINTS);
 	private static final BlackboardAttribute.Type AREAPOINTS_ATTR_TYPE = new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_AREAPOINTS);
+	
+	private static final BlackboardArtifact.Type GPS_TRACK_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACK);
+	private static final BlackboardArtifact.Type GPS_ROUTE_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_ROUTE);
+	private static final BlackboardArtifact.Type GPS_AREA_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA);
+	
 	private final String programName;
 
 	/**
@@ -105,8 +111,8 @@ public BlackboardArtifact addTrack(String trackName, GeoTrackPoints trackPoints,
 			attributes.addAll(moreAttributes);
 		}
 
-		BlackboardArtifact artifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACK);
-		artifact.addAttributes(attributes);
+		Content content = getContent();
+		BlackboardArtifact artifact = content.newDataArtifact(GPS_TRACK_TYPE, attributes);
 
 		getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName());
 
@@ -124,7 +130,7 @@ public BlackboardArtifact addTrack(String trackName, GeoTrackPoints trackPoints,
 	 * @param creationTime   The time at which the route was created as
 	 *                       milliseconds from the Java epoch of
 	 *                       1970-01-01T00:00:00Z, may be null.
-	 * @param wayPoints      The waypoints that make up the route.  This list
+	 * @param wayPoints      The waypoints that make up the route. This list
 	 *                       should be non-null and non-empty.
 	 * @param moreAttributes Additional attributes for the TSK_GPS_ROUTE
 	 *                       artifact, may be null.
@@ -143,7 +149,6 @@ public BlackboardArtifact addRoute(String routeName, Long creationTime, GeoWaypo
 			throw new IllegalArgumentException(String.format("addRoute was passed a null or empty list of waypoints"));
 		}
 
-		BlackboardArtifact artifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_ROUTE);
 		List<BlackboardAttribute> attributes = new ArrayList<>();
 
 		attributes.add(BlackboardJsonAttrUtil.toAttribute(WAYPOINTS_ATTR_TYPE, getModuleName(), wayPoints));
@@ -164,20 +169,22 @@ public BlackboardArtifact addRoute(String routeName, Long creationTime, GeoWaypo
 			attributes.addAll(moreAttributes);
 		}
 
-		artifact.addAttributes(attributes);
-
+		Content content = getContent();
+		BlackboardArtifact artifact = content.newDataArtifact(GPS_ROUTE_TYPE, attributes);
+		
 		getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName());
 
 		return artifact;
 	}
+
 	/**
 	 * Adds a TSK_GPS_AREA artifact to the case database. A Global Positioning
-	 * System (GPS) area artifact records an area on the map outlines by
-	 * an ordered set of GPS coordinates.
+ * System (GPS) area artifact records an area on the map outlined by an
+	 * ordered set of GPS coordinates.
 	 *
 	 * @param areaName       The name of the GPS area, may be null.
-	 * @param areaPoints     The points that make up the outline of the area.  This list
-	 *                       should be non-null and non-empty.
+	 * @param areaPoints     The points that make up the outline of the area.
+	 *                       This list should be non-null and non-empty.
 	 * @param moreAttributes Additional attributes for the TSK_GPS_AREA
 	 *                       artifact, may be null.
 	 *
@@ -194,8 +201,7 @@ public BlackboardArtifact addArea(String areaName, GeoAreaPoints areaPoints, Lis
 		if (areaPoints == null || areaPoints.isEmpty()) {
 			throw new IllegalArgumentException(String.format("addArea was passed a null or empty list of points"));
 		}
-		
-		BlackboardArtifact artifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA);
+
 		List<BlackboardAttribute> attributes = new ArrayList<>();
 		attributes.add(BlackboardJsonAttrUtil.toAttribute(AREAPOINTS_ATTR_TYPE, getModuleName(), areaPoints));
 
@@ -211,8 +217,9 @@ public BlackboardArtifact addArea(String areaName, GeoAreaPoints areaPoints, Lis
 			attributes.addAll(moreAttributes);
 		}
 
-		artifact.addAttributes(attributes);
-
+		Content content = getContent();
+		BlackboardArtifact artifact = content.newDataArtifact(GPS_AREA_TYPE, attributes);
+		
 		getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName());
 
 		return artifact;
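
Taken together, the GeoArtifactsHelper hunks above converge on one pattern: build the attribute list first, then create the data artifact in a single call rather than creating an empty artifact and attaching attributes afterwards. The following is a minimal illustrative sketch of that pattern, not an additional change in this patch; it reuses only calls visible in the hunks (getContent(), getModuleName(), getSleuthkitCase(), the GPS_TRACK_TYPE constant, and addTrack()'s trackName parameter).

    // Collect the attributes first...
    List<BlackboardAttribute> attributes = new ArrayList<>();
    attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), trackName));

    // ...then create the data artifact and attach them in one call,
    // instead of newArtifact(...) followed by addAttributes(...).
    Content content = getContent();
    BlackboardArtifact artifact = content.newDataArtifact(GPS_TRACK_TYPE, attributes);

    // Posting the artifact is unchanged.
    getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName());
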
diff --git a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/WebBrowserArtifactsHelper.java b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/WebBrowserArtifactsHelper.java
index 3cc193a40acd95a9cb1848a1f635b4e8c1767872..28f9b1b21d487db5463ab3f02f71891126d22576 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/WebBrowserArtifactsHelper.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/WebBrowserArtifactsHelper.java
@@ -27,6 +27,7 @@
 import java.util.logging.Level;
 import java.util.logging.Logger;
 import org.apache.commons.lang3.StringUtils;
+import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.Account;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -47,7 +48,13 @@
 public final class WebBrowserArtifactsHelper extends ArtifactHelperBase {
 
 	private static final Logger LOGGER = Logger.getLogger(WebBrowserArtifactsHelper.class.getName());
-	
+	private static final BlackboardArtifact.Type WEB_BOOKMARK_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK);
+	private static final BlackboardArtifact.Type WEB_COOKIE_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE);
+	private static final BlackboardArtifact.Type WEB_DOWNLOAD_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD);
+	private static final BlackboardArtifact.Type WEB_FORM_ADDRESS_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS);
+	private static final BlackboardArtifact.Type WEB_FORM_AUTOFILL_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL);
+	private static final BlackboardArtifact.Type WEB_HISTORY_TYPE = new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY);
+		
 	/**
 	 * Creates a WebBrowserArtifactsHelper.
 	 *
@@ -97,12 +104,8 @@ public BlackboardArtifact addWebBookmark(String url, String title, long creation
 	public BlackboardArtifact addWebBookmark(String url, String title, long creationTime, String progName,
 			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
 
-		BlackboardArtifact bookMarkArtifact;
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
-		// create artifact
-		bookMarkArtifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK);
-
 		// construct attributes 
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url));
 
@@ -113,7 +116,9 @@ public BlackboardArtifact addWebBookmark(String url, String title, long creation
 
 		// add attributes to artifact
 		attributes.addAll(otherAttributesList);
-		bookMarkArtifact.addAttributes(attributes);
+		
+		Content content = getContent();
+		BlackboardArtifact bookMarkArtifact = content.newDataArtifact(WEB_BOOKMARK_TYPE, attributes);
 
 		// post artifact 
 		getSleuthkitCase().getBlackboard().postArtifact(bookMarkArtifact, getModuleName());
@@ -167,12 +172,8 @@ public BlackboardArtifact addWebCookie(String url,
 			long creationTime, String name, String value, String programName,
 			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
 
-		BlackboardArtifact cookieArtifact;
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
-		// create artifact
-		cookieArtifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE);
-
 		// construct attributes 
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url));
 
@@ -184,8 +185,10 @@ public BlackboardArtifact addWebCookie(String url,
 
 		// add attributes to artifact
 		attributes.addAll(otherAttributesList);
-		cookieArtifact.addAttributes(attributes);
-
+		
+		Content content = getContent();
+		BlackboardArtifact cookieArtifact = content.newDataArtifact(WEB_COOKIE_TYPE, attributes);
+		
 		// post artifact 
 		getSleuthkitCase().getBlackboard().postArtifact(cookieArtifact, getModuleName());
 
@@ -229,12 +232,8 @@ public BlackboardArtifact addWebDownload(String url, long startTime, String path
 	public BlackboardArtifact addWebDownload(String url, long startTime, String path, String programName,
 			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
 
-		BlackboardArtifact webDownloadArtifact;
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
-		// reate artifact
-		webDownloadArtifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD);
-
 		// construct attributes 
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, getModuleName(), path));
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url));
@@ -245,8 +244,10 @@ public BlackboardArtifact addWebDownload(String url, long startTime, String path
 
 		// add attributes to artifact
 		attributes.addAll(otherAttributesList);
-		webDownloadArtifact.addAttributes(attributes);
-
+		
+		Content content = getContent();
+		BlackboardArtifact webDownloadArtifact = content.newDataArtifact(WEB_DOWNLOAD_TYPE, attributes);
+		
 		// post artifact 
 		getSleuthkitCase().getBlackboard().postArtifact(webDownloadArtifact, getModuleName());
 
@@ -300,7 +301,6 @@ public BlackboardArtifact addWebFormAddress(String personName, String email,
 			long creationTime, long accessTime, int count,
 			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
 
-		BlackboardArtifact webFormAddressArtifact;
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 		
 		CommunicationsManager commManager = this.getSleuthkitCase().getCommunicationsManager();
@@ -320,10 +320,7 @@ public BlackboardArtifact addWebFormAddress(String personName, String email,
 				LOGGER.log(Level.WARNING, String.format("Invalid account identifier %s", phoneNumber), ex);
 			}
 		}
-
-		// create artifact
-		webFormAddressArtifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS);
-
+		
 		// construct attributes 
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), personName));
 
@@ -336,9 +333,9 @@ public BlackboardArtifact addWebFormAddress(String personName, String email,
 		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, count, attributes);
 
 		// add artifact
-		attributes.addAll(otherAttributesList);
-		webFormAddressArtifact.addAttributes(attributes);
-
+		attributes.addAll(otherAttributesList);
+		Content content = getContent();
+		BlackboardArtifact webFormAddressArtifact = content.newDataArtifact(WEB_FORM_ADDRESS_TYPE, attributes);
 		// post artifact 
 		getSleuthkitCase().getBlackboard().postArtifact(webFormAddressArtifact, getModuleName());
 
@@ -386,12 +383,9 @@ public BlackboardArtifact addWebFormAutofill(String name, String value,
 	public BlackboardArtifact addWebFormAutofill(String name, String value,
 			long creationTime, long accessTime, int count,
 			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
-		BlackboardArtifact webFormAutofillArtifact;
+		
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
-		// create artifact
-		webFormAutofillArtifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL);
-
 		// construct attributes 
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), name));
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, getModuleName(), value));
@@ -402,8 +396,10 @@ public BlackboardArtifact addWebFormAutofill(String name, String value,
 
 		// add attributes to artifact
 		attributes.addAll(otherAttributesList);
-		webFormAutofillArtifact.addAttributes(attributes);
 
+		Content content = getContent();
+		BlackboardArtifact webFormAutofillArtifact = content.newDataArtifact(WEB_FORM_AUTOFILL_TYPE, attributes);
+		
 		// post artifact 
 		getSleuthkitCase().getBlackboard().postArtifact(webFormAutofillArtifact, getModuleName());
 
@@ -452,12 +448,8 @@ public BlackboardArtifact addWebHistory(String url, long accessTime,
 			String referrer, String title, String programName,
 			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
 
-		BlackboardArtifact webHistoryArtifact;
 		Collection<BlackboardAttribute> attributes = new ArrayList<>();
 
-		// create artifact
-		webHistoryArtifact = getContent().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY);
-
 		// construct attributes 
 		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url));
 
@@ -470,8 +462,10 @@ public BlackboardArtifact addWebHistory(String url, long accessTime,
 
 		// add attributes to artifact
 		attributes.addAll(otherAttributesList);
-		webHistoryArtifact.addAttributes(attributes);
-
+		
+		Content content = getContent();
+		BlackboardArtifact webHistoryArtifact = content.newDataArtifact(WEB_HISTORY_TYPE, attributes);
+		
 		// post artifact 
 		getSleuthkitCase().getBlackboard().postArtifact(webHistoryArtifact, getModuleName());
 
diff --git a/bindings/java/src/org/sleuthkit/datamodel/localization/lastupdated.properties b/bindings/java/src/org/sleuthkit/datamodel/localization/lastupdated.properties
index 386e87ecdcc4c833c6597f3582a9af9a4af3d0ee..8d365ea9d44c2e2da3f3bb1969d089e6d5c29bad 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/localization/lastupdated.properties
+++ b/bindings/java/src/org/sleuthkit/datamodel/localization/lastupdated.properties
@@ -1,2 +1,2 @@
-#Tue Feb 16 13:50:31 UTC 2021
-bundles.ja.lastupdated=1f58a60f55bafef28d942d87b378d76501dfab7c
+#Thu Jul 01 12:01:30 UTC 2021
+bundles.ja.lastupdated=8e19cd639b4cbc45f216c427008de0afb2ccbe02
diff --git a/bindings/java/test/org/sleuthkit/datamodel/ArtifactTest.java b/bindings/java/test/org/sleuthkit/datamodel/ArtifactTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..16511bfb11c9ff4ce5d1650d07a26fec9d321018
--- /dev/null
+++ b/bindings/java/test/org/sleuthkit/datamodel/ArtifactTest.java
@@ -0,0 +1,426 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.AfterClass;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Tests the artifact APIs.
+ * 
+ */
+public class ArtifactTest {
+	
+	private static final String MODULE_NAME = "ArtifactTest";
+	
+	private static final Logger LOGGER = Logger.getLogger(ArtifactTest.class.getName());
+
+	private static SleuthkitCase caseDB;
+
+	private final static String TEST_DB = "ArtifactApiTest.db";
+
+
+	private static String dbPath = null;
+	private static Image image = null;
+	private static FileSystem fs = null;
+	
+	
+
+	public ArtifactTest (){
+
+	}
+	
+	@BeforeClass
+	public static void setUpClass() {
+		String tempDirPath = System.getProperty("java.io.tmpdir");
+		try {
+			dbPath = Paths.get(tempDirPath, TEST_DB).toString();
+
+			// Delete the DB file, in case it exists from a previous run
+			java.io.File dbFile = new java.io.File(dbPath);
+			dbFile.delete();
+			if (dbFile.getParentFile() != null) {
+				dbFile.getParentFile().mkdirs();
+			}
+
+			// Create new case db
+			caseDB = SleuthkitCase.newCase(dbPath);
+			
+			// uncomment to manually test with PostgreSQL
+			//CaseDbConnectionInfo connectionInfo = new CaseDbConnectionInfo("HostName", "5432", "User", "Password", TskData.DbType.POSTGRESQL);
+			//caseDB = SleuthkitCase.newCase("TskArtifactTest", connectionInfo, tempDirPath);
+
+			SleuthkitCase.CaseDbTransaction trans = caseDB.beginTransaction();
+
+			image = caseDB.addImage(TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_DETECT, 512, 1024, "", Collections.emptyList(), "America/New_York", null, null, null, "first", trans);
+
+			fs = caseDB.addFileSystem(image.getId(), 0, TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_RAW, 0, 0, 0, 0, 0, "", trans);
+
+			trans.commit();
+
+			System.out.println("Artifacts Test DB created at: " + dbPath);
+		} catch (TskCoreException ex) {
+			LOGGER.log(Level.SEVERE, "Failed to create new case", ex);
+		}
+	}
+
+	@AfterClass
+	public static void tearDownClass() {
+
+	}
+
+	@Before
+	public void setUp() {
+	}
+
+	@After
+	public void tearDown() {
+	}
+	
+	@Test
+	public void artifactTests() throws TskCoreException, Blackboard.BlackboardException, OsAccountManager.NotUserSIDException {
+
+
+		// first add a few files.
+		
+		String testMD5 = "c67017ead6356b987b30536d35e8f562";
+		List<Attribute> fileAttributes = new ArrayList<>();
+		fileAttributes.add(new Attribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED), 1611233915L));
+
+		List<Attribute> fileAttributes2 = new ArrayList<>();
+		fileAttributes2.add(new Attribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SSID), "S-1-15-3443-2233"));
+
+
+		long dataSourceObjectId = fs.getDataSource().getId();
+		
+		SleuthkitCase.CaseDbTransaction trans = caseDB.beginTransaction();
+
+		// Add a root folder
+		FsContent _root = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, null, null, null, false, fs, null, null, Collections.emptyList(), trans);
+
+		// Add a dir - no attributes 
+		FsContent _windows = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "Windows", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, null, null, null, false, _root, "S-1-5-80-956008885-3418522649-1831038044-1853292631-227147846", null, Collections.emptyList(), trans);
+
+		// Add dllhosts.exe file to the above dir
+		FsContent _dllhosts = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "dllhosts.exe", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, testMD5, null, "Applicatione/Exe", true, _windows, "S-1-5-32-544", null, fileAttributes, trans);
+
+		// add another no attribute file to the same folder
+		FsContent _nofile = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "nofile.exe", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, null, null, "Applicatione/Exe", true, _windows, null, null, Collections.emptyList(), trans);
+		
+		// add another no attribute file to same folder
+		FsContent _abcTextFile = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "abc.txt", 0, 0,
+					TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+					(short) 0, 200, 0, 0, 0, 0, null, null, "Text/Plain", true, _windows, null, null, Collections.emptyList(), trans);
+		
+		// add another no attribute file to same folder
+		FsContent _defTextFile = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "def.txt", 0, 0,
+					TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+					(short) 0, 200, 0, 0, 0, 0, null, null, "Text/Plain", true, _windows, null, null, Collections.emptyList(), trans);			
+		
+		// Add additional attributes to dllhosts file - within the same transaction. 
+		_dllhosts.addAttributes(fileAttributes2, trans);
+	
+		trans.commit();
+		
+		
+		// Create a host and an account.
+		String hostname1 = "host1";
+		String realmName1 = "realm1";
+		String ownerUid1 = "S-1-5-21-111111111-222222222-3333333333-0001";
+
+		Host host1 = caseDB.getHostManager().newHost(hostname1);
+		OsAccountRealm localRealm1 = caseDB.getOsAccountRealmManager().newWindowsRealm(ownerUid1, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount osAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid1, null, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+
+		// create a 2nd account on the same host
+		String ownerUid2 = "S-1-5-21-111111111-222222222-3333333333-0009";
+		OsAccount osAccount2 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid2, null, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+		
+		
+		// now find the file abc.txt
+		List<AbstractFile> abctextfiles = caseDB.findFiles(fs.getDataSource(), "abc.txt");
+		assertEquals(1, abctextfiles.size());
+	
+		AbstractFile abcTextFile = abctextfiles.get(0);
+		
+		// create an attribute for the artifact
+        Collection<BlackboardAttribute> attributes = new ArrayList<>();
+        attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD, MODULE_NAME, "keyword1"));
+        
+		// Test: attach an analysis result to the file. 
+		AnalysisResultAdded analysisResultAdded1 = abcTextFile.newAnalysisResult(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT), 
+																		Score.SCORE_LIKELY_NOTABLE, "Keyword hit found", "", "", attributes);
+   
+		assertEquals(Score.Significance.LIKELY_NOTABLE.getId(), analysisResultAdded1.getAnalysisResult().getScore().getSignificance().getId());
+		assertEquals(Score.Priority.NORMAL.getId(), analysisResultAdded1.getAnalysisResult().getScore().getPriority().getId());
+		assertTrue(analysisResultAdded1.getAnalysisResult().getConclusion().equalsIgnoreCase("Keyword hit found"));
+		
+		// Add a 2nd analysis result to the same file
+		AnalysisResultAdded analysisResultAdded2 = abcTextFile.newAnalysisResult(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT), 
+																	Score.SCORE_LIKELY_NOTABLE, "Thats a rather intersting file.", "", "", Collections.emptyList());
+   
+		// Add a 3rd analysis result to the same file 
+		AnalysisResultAdded analysisResultAdded3 = abcTextFile.newAnalysisResult(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_ENCRYPTION_DETECTED), 
+																	Score.SCORE_NOTABLE, "Highly scrambled text!!", "", "", Collections.emptyList());
+		// get analysis results and verify count
+		
+		List<AnalysisResult> ars = abcTextFile.getAllAnalysisResults();
+		assertEquals(3, ars.size());
+		
+		// verify the aggregate score - expect NOTABLE/NORMAL - highest of the 3 results added
+		Score aggScore = abcTextFile.getAggregateScore();
+		assertEquals(Score.Significance.NOTABLE.getId(), aggScore.getSignificance().getId());
+		assertEquals(Score.Priority.NORMAL.getId(), aggScore.getPriority().getId());
+		
+		// delete analysis result 3
+		Score newScore = caseDB.getBlackboard().deleteAnalysisResult(analysisResultAdded3.getAnalysisResult());
+		
+		// get analysis results and verify count
+		ars = abcTextFile.getAllAnalysisResults();
+		assertEquals(2, ars.size());
+		
+		// verify aggregate score - should now be LIKELY_NOTABLE/NORMAL
+		Score newAggScore = abcTextFile.getAggregateScore();
+		assertEquals(Score.Significance.LIKELY_NOTABLE.getId(), newAggScore.getSignificance().getId());
+		assertEquals(Score.Priority.NORMAL.getId(), newAggScore.getPriority().getId());
+		
+		
+		// Test Analysis Results in a Transaction
+		SleuthkitCase.CaseDbTransaction transAr = caseDB.beginTransaction();
+		AnalysisResultAdded analysisResultAdded4 = caseDB.getBlackboard().newAnalysisResult(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT), 
+																	abcTextFile.getId(), abcTextFile.getDataSourceObjectId(), Score.SCORE_LIKELY_NOTABLE, "Thats a rather intersting file.", "", "", Collections.emptyList(), transAr);
+		
+		AnalysisResultAdded analysisResultAdded5 = caseDB.getBlackboard().newAnalysisResult(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT), 
+																	abcTextFile.getId(), abcTextFile.getDataSourceObjectId(), new Score(Score.Significance.LIKELY_NONE, Score.Priority.OVERRIDE), "Thats a rather intersting file.", "", "", Collections.emptyList(), transAr);
+
+		transAr.commit();
+		ars = abcTextFile.getAllAnalysisResults();
+		assertEquals(4, ars.size());
+		
+		// verify aggregate score - should now be LIKELY_NONE/OVERRIDE
+		newAggScore = abcTextFile.getAggregateScore();
+		assertEquals(Score.Significance.LIKELY_NONE.getId(), newAggScore.getSignificance().getId());
+		assertEquals(Score.Priority.OVERRIDE.getId(), newAggScore.getPriority().getId());
+
+		
+		
+		// Test: add a new data artifact to the file
+		DataArtifact dataArtifact1 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_SEARCH), Collections.emptyList(), osAccount1.getId());
+        
+		OsAccountManager osAcctMgr = caseDB.getOsAccountManager();
+		
+		assertTrue(dataArtifact1.getOsAccountObjectId().isPresent());
+		assertTrue(osAcctMgr.getOsAccountByObjectId(dataArtifact1.getOsAccountObjectId().get()).getAddr().orElse("").equalsIgnoreCase(ownerUid1));
+		
+		
+		// Test: add a second data artifact to file - associate it with a different account
+		DataArtifact dataArtifact2 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CLIPBOARD_CONTENT), Collections.emptyList(), osAccount2.getId());
+		assertTrue(dataArtifact2.getOsAccountObjectId().isPresent());
+		assertTrue(osAcctMgr.getOsAccountByObjectId(dataArtifact2.getOsAccountObjectId().get()).getAddr().orElse("").equalsIgnoreCase(ownerUid2));
+				
+				
+		// and two more 
+		DataArtifact dataArtifact3 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA), Collections.emptyList(), osAccount2.getId());
+		DataArtifact dataArtifact4 = abcTextFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA), Collections.emptyList(), osAccount2.getId());
+
+		
+		// TEST: get all TSK_GPS_SEARCH data artifacts in the data source
+		List<DataArtifact> gpsArtifacts = caseDB.getBlackboard().getDataArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_SEARCH.getTypeID(), image.getId());
+		assertEquals(1, gpsArtifacts.size());
+
+		// verify the account was set from the query
+		assertTrue(gpsArtifacts.get(0).getOsAccountObjectId().isPresent());
+		assertTrue(osAcctMgr.getOsAccountByObjectId(gpsArtifacts.get(0).getOsAccountObjectId().get()).getAddr().orElse("").equalsIgnoreCase(ownerUid1));
+
+		
+		
+		// TEST: get all TSK_GPS_AREA data artifacts in the data source
+		List<DataArtifact> gpsAreaArtifacts = caseDB.getBlackboard().getDataArtifacts(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_AREA.getTypeID(), image.getId());
+		assertEquals(2, gpsAreaArtifacts.size());
+		// verify the account on each
+		assertTrue(osAcctMgr.getOsAccountByObjectId(gpsAreaArtifacts.get(0).getOsAccountObjectId().get()).getAddr().orElse("").equalsIgnoreCase(ownerUid2));
+		assertTrue(osAcctMgr.getOsAccountByObjectId(gpsAreaArtifacts.get(1).getOsAccountObjectId().get()).getAddr().orElse("").equalsIgnoreCase(ownerUid2));
+
+		
+		// Testing that artifacts created using the old methods and new methods are treated the same.
+		// Find the file def.txt
+		List<AbstractFile> deftextfiles = caseDB.findFiles(fs.getDataSource(), "def.txt");
+		assertEquals(1, deftextfiles.size());
+	
+		AbstractFile defTextFile = deftextfiles.get(0);
+		
+		// Test analysis results.
+		// Using a custom analysis result type for additional test coverage
+		BlackboardArtifact.Type analysisArtType = caseDB.getBlackboard().getOrAddArtifactType("CUSTOM_ANALYSIS_RESULT", "Custom Analysis Result", BlackboardArtifact.Category.ANALYSIS_RESULT);
+
+		AnalysisResultAdded added0 = defTextFile.newAnalysisResult(analysisArtType, Score.SCORE_UNKNOWN, 
+				"", "", null, java.util.Collections.emptyList());
+		trans = caseDB.beginTransaction();
+		AnalysisResultAdded added1 = caseDB.getBlackboard().newAnalysisResult(analysisArtType, defTextFile.getId(), fs.getDataSource().getId(), Score.SCORE_UNKNOWN, 
+				"conclusion1", "config1", "justification1", java.util.Collections.emptyList(), trans);
+		AnalysisResultAdded added2 = caseDB.getBlackboard().newAnalysisResult(analysisArtType, defTextFile.getId(), fs.getDataSource().getId(), Score.SCORE_UNKNOWN, 
+				"", "", null, java.util.Collections.emptyList(), trans);
+		AnalysisResultAdded added3 = caseDB.getBlackboard().newAnalysisResult(analysisArtType, defTextFile.getId(), fs.getDataSource().getId(), Score.SCORE_UNKNOWN, 
+				"", "config3", null, java.util.Collections.emptyList(), trans);
+		AnalysisResultAdded added4 = caseDB.getBlackboard().newAnalysisResult(analysisArtType, defTextFile.getId(), fs.getDataSource().getId(), Score.SCORE_NOTABLE, 
+				"", "", null, java.util.Collections.emptyList(), trans);
+		trans.commit();
+		
+		@SuppressWarnings("deprecation")
+		BlackboardArtifact bbArt2 = defTextFile.newArtifact(analysisArtType.getTypeID());
+		int analysisResultCount = 6;
+		
+		// TEST: getAnalysisResults(file id)
+		List<AnalysisResult> analysisResultResults = caseDB.getBlackboard().getAnalysisResults(defTextFile.getId());
+		assertEquals(analysisResultCount, analysisResultResults.size());
+
+		// TEST: getAnalysisResults(file id, artifact type)
+		analysisResultResults = caseDB.getBlackboard().getAnalysisResults(defTextFile.getId(), analysisArtType.getTypeID());
+		assertEquals(analysisResultCount, analysisResultResults.size());
+
+		// TEST: getAnalysisResultsWhere("obj_id = <file id>")
+		analysisResultResults = caseDB.getBlackboard().getAnalysisResultsWhere("obj_id=" + defTextFile.getId());
+		assertEquals(analysisResultCount, analysisResultResults.size());
+
+		// Test: getArtifacts(artifact type, data source id)
+		List<BlackboardArtifact> artifactResults = caseDB.getBlackboard().getArtifacts(analysisArtType.getTypeID(), fs.getDataSource().getId());
+		assertEquals(analysisResultCount, artifactResults.size());
+		
+		// TEST: getBlackboardArtifact(artifactId) 
+		BlackboardArtifact art = caseDB.getBlackboardArtifact(added0.getAnalysisResult().getArtifactID());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(added1.getAnalysisResult().getArtifactID());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(added2.getAnalysisResult().getArtifactID());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(added3.getAnalysisResult().getArtifactID());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(added4.getAnalysisResult().getArtifactID());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(bbArt2.getArtifactID());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+
+		// TEST: getArtifactById(artifact obj id)
+		art = caseDB.getArtifactById(added0.getAnalysisResult().getId());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(added1.getAnalysisResult().getId());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(added2.getAnalysisResult().getId());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(added3.getAnalysisResult().getId());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(added4.getAnalysisResult().getId());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(bbArt2.getId());
+		assertEquals(analysisArtType.getTypeID(), art.getArtifactTypeID());
+		
+		// Test data artifacts
+		// Using a custom data artifact type for additional test coverage
+		BlackboardArtifact.Type dataArtType = caseDB.getBlackboard().getOrAddArtifactType("CUSTOM_DATA_ARTIFACT", "Custom Data Artifact", BlackboardArtifact.Category.DATA_ARTIFACT);
+
+		// Create five data artifacts. Only three should create a row in tsk_data_artifacts.
+		DataArtifact dataArt1 = defTextFile.newDataArtifact(dataArtType, java.util.Collections.emptyList(), null);
+		DataArtifact dataArt2 = defTextFile.newDataArtifact(dataArtType, java.util.Collections.emptyList(), osAccount2.getId());
+		
+		@SuppressWarnings("deprecation")
+		BlackboardArtifact bbArt1 = defTextFile.newArtifact(dataArtType.getTypeID());
+		DataArtifact dataArt3 = defTextFile.newDataArtifact(dataArtType, java.util.Collections.emptyList(), osAccount2.getId());
+		DataArtifact dataArt4 = caseDB.getBlackboard().newDataArtifact(dataArtType, defTextFile.getId(), fs.getDataSource().getId(), java.util.Collections.emptyList(), osAccount2.getId());
+		int dataArtifactCount = 5;
+		
+		// TEST: getDataArtifacts(artifact type id)
+		List<DataArtifact> dataArtifactResults = caseDB.getBlackboard().getDataArtifacts(dataArtType.getTypeID());
+		assertEquals(dataArtifactCount, dataArtifactResults.size());
+            
+		// TEST: getDataArtifacts(artifact type id, data source id)
+		dataArtifactResults = caseDB.getBlackboard().getDataArtifacts(dataArtType.getTypeID(), fs.getDataSource().getId());
+		assertEquals(dataArtifactCount, dataArtifactResults.size());
+		
+		// TEST: getBlackboardArtifacts(artifact type id, data source id)
+		artifactResults = caseDB.getBlackboardArtifacts(dataArtType.getTypeID());
+		assertEquals(dataArtifactCount, artifactResults.size());
+
+        // TEST: getBlackboardArtifacts(artifact type id, file id)
+        artifactResults = caseDB.getBlackboardArtifacts(dataArtType.getTypeID(), defTextFile.getId());
+		assertEquals(dataArtifactCount, artifactResults.size());
+            
+        // TEST: getArtifacts(artifact type id, data source id)
+        artifactResults = caseDB.getBlackboard().getArtifacts(dataArtType.getTypeID(), fs.getDataSource().getId());
+		assertEquals(dataArtifactCount, artifactResults.size());
+            
+        // TEST: getMatchingArtifacts(where clause)
+        artifactResults = caseDB.getMatchingArtifacts("WHERE artifact_type_id=" + dataArtType.getTypeID());
+		assertEquals(dataArtifactCount, artifactResults.size());
+		
+        // TEST: getBlackboardArtifact(artifactId) 
+		art = caseDB.getBlackboardArtifact(dataArt1.getArtifactID());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(dataArt2.getArtifactID());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(bbArt1.getArtifactID());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(dataArt3.getArtifactID());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getBlackboardArtifact(dataArt4.getArtifactID());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		
+		// TEST: getArtifactById(artifact obj id)
+		art = caseDB.getArtifactById(dataArt1.getId());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(dataArt2.getId());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(bbArt1.getId());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(dataArt3.getId());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());
+		art = caseDB.getArtifactById(dataArt4.getId());
+		assertEquals(dataArtType.getTypeID(), art.getArtifactTypeID());	
+		
+		// TEST: getBlackboardArtifactsCount()
+		assertEquals(analysisResultCount + dataArtifactCount, caseDB.getBlackboardArtifactsCount(defTextFile.getId()));
+		
+		
+		// set a file to unallocated.
+		caseDB.setFileUnalloc(abcTextFile);
+		assertFalse(abcTextFile.isDirNameFlagSet(TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC));
+		assertFalse(abcTextFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC));
+		assertTrue(abcTextFile.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.UNALLOC));
+	}
+}
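
For orientation, ArtifactTest above exercises the two artifact creation paths side by side. The following is a condensed illustrative sketch of the calls the test relies on, not part of the patch; here file, attributes, and osAccount stand for an AbstractFile, a Collection<BlackboardAttribute>, and an OsAccount as set up in the test.

    // Analysis result: carries a Score plus conclusion, configuration, and justification.
    AnalysisResultAdded added = file.newAnalysisResult(
            new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT),
            Score.SCORE_LIKELY_NOTABLE, "Keyword hit found", "", "", attributes);

    // Data artifact: extracted data, optionally linked to an OS account object id.
    DataArtifact artifact = file.newDataArtifact(
            new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_SEARCH),
            attributes, osAccount.getId());
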
diff --git a/bindings/java/test/org/sleuthkit/datamodel/AttributeTest.java b/bindings/java/test/org/sleuthkit/datamodel/AttributeTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..37066baf6dec4ef95f1ae1a2910cd0a877152982
--- /dev/null
+++ b/bindings/java/test/org/sleuthkit/datamodel/AttributeTest.java
@@ -0,0 +1,163 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import org.junit.After;
+import org.junit.AfterClass;
+import static org.junit.Assert.assertEquals;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ *
+ * Tests the TSK Attribute APIs.
+ * Under test are the following:
+ * - File Attribute
+ *
+ */
+public class AttributeTest {
+
+	private static final Logger LOGGER = Logger.getLogger(AttributeTest.class.getName());
+
+	private static SleuthkitCase caseDB;
+
+	private final static String TEST_DB = "AttributeApiTest.db";
+
+
+	private static String dbPath = null;
+	private static FileSystem fs = null;
+
+	public AttributeTest (){
+
+	}
+
+	@BeforeClass
+	public static void setUpClass() {
+		String tempDirPath = System.getProperty("java.io.tmpdir");
+		try {
+			dbPath = Paths.get(tempDirPath, TEST_DB).toString();
+
+			// Delete the DB file, in case it exists from a previous run
+			java.io.File dbFile = new java.io.File(dbPath);
+			dbFile.delete();
+			if (dbFile.getParentFile() != null) {
+				dbFile.getParentFile().mkdirs();
+			}
+
+			// Create new case db
+			caseDB = SleuthkitCase.newCase(dbPath);
+
+			SleuthkitCase.CaseDbTransaction trans = caseDB.beginTransaction();
+
+			Image img = caseDB.addImage(TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_DETECT, 512, 1024, "", Collections.emptyList(), "America/New_York", null, null, null, "first", trans);
+
+			fs = caseDB.addFileSystem(img.getId(), 0, TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_RAW, 0, 0, 0, 0, 0, "", trans);
+
+			trans.commit();
+
+
+			System.out.println("Attributes Test DB created at: " + dbPath);
+		} catch (TskCoreException ex) {
+			LOGGER.log(Level.SEVERE, "Failed to create new case", ex);
+		}
+	}
+
+
+	@AfterClass
+	public static void tearDownClass() {
+
+	}
+
+	@Before
+	public void setUp() {
+	}
+
+	@After
+	public void tearDown() {
+	}
+
+	@Test
+	public void fileAttributeTests() throws TskCoreException {
+
+		String testMD5 = "c67017ead6356b987b30536d35e8f562";
+		List<Attribute> fileAttributes = new ArrayList<>();
+		fileAttributes.add(new Attribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED), 1611233915L));
+
+		List<Attribute> fileAttributes2 = new ArrayList<>();
+		fileAttributes2.add(new Attribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SSID), "S-1-15-3443-2233"));
+
+
+		long dataSourceObjectId = fs.getDataSource().getId();
+		
+		SleuthkitCase.CaseDbTransaction trans = caseDB.beginTransaction();
+
+		// Add a root folder
+		FsContent root = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, null, null, null, false, fs, null, null, Collections.emptyList(), trans);
+
+		// Add a dir - no attributes 
+		FsContent windows = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "Windows", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, null, null, null, false, root, "S-1-5-80-956008885-3418522649-1831038044-1853292631-227147846", null, Collections.emptyList(), trans);
+
+		// Add dllhosts.exe file to the above dir
+		FsContent dllhosts = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "dllhosts.exe", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, testMD5, null, "Applicatione/Exe", true, windows, "S-1-5-32-544", null, fileAttributes, trans);
+
+		// add another no attribute file to the same folder
+		FsContent _nofile = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "nofile.exe", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, null, null, "Applicatione/Exe", true, windows, null, null, Collections.emptyList(), trans);
+		
+
+		// Add additional attributes to dllhosts file - within the same transaction. 
+		dllhosts.addAttributes(fileAttributes2, trans);
+		
+		long firstFileAttributeId  = fileAttributes.get(0).getId();
+		
+		assertEquals("Assert that the first file attribute has a db generated id",true, firstFileAttributeId > 0);
+		
+		trans.commit();
+ 
+		assertEquals(2, dllhosts.getAttributes().size());
+		assertEquals(firstFileAttributeId, dllhosts.getAttributes().get(0).getId());
+		
+		
+		// Lookup the file by Md5 and assert it has 2 attributes 
+		List<AbstractFile> matchingFiles = caseDB.findFilesByMd5(testMD5);
+		assertEquals(1, matchingFiles.size());
+		assertEquals(2, matchingFiles.get(0).getAttributes().size());
+		assertEquals(firstFileAttributeId, matchingFiles.get(0).getAttributes().get(0).getId());
+
+		List<AbstractFile> nofile = caseDB.findFiles(fs.getDataSource(), "nofile.exe");
+		assertEquals(1, nofile.size());
+		assertEquals(0, nofile.get(0).getAttributes().size());
+ 
+
+	}
+}
diff --git a/bindings/java/test/org/sleuthkit/datamodel/BottomUpTest.java b/bindings/java/test/org/sleuthkit/datamodel/BottomUpTest.java
index 1add49a71da697538db64aaebca88a9ebcac53a9..b588d85f2293788ca339d717e169ed9f54f2e487 100644
--- a/bindings/java/test/org/sleuthkit/datamodel/BottomUpTest.java
+++ b/bindings/java/test/org/sleuthkit/datamodel/BottomUpTest.java
@@ -86,13 +86,12 @@ public void testBottomUpDiff() {
 			title = title + DataModelTestSuite.BTTMUP + ".txt";
 			SleuthkitJNI.CaseDbHandle.AddImageProcess process = sk.makeAddImageProcess(timezone, true, false, "");
 			try {
-				process.run(imagePaths.toArray(new String[imagePaths.size()]));
+				process.run("Data Source ID", imagePaths.toArray(new String[imagePaths.size()]));
 			} catch (TskDataException ex) {
 				List<Exception> inp = new ArrayList<Exception>();
 				inp.add(ex);
 				DataModelTestSuite.writeExceptions(exFile, inp);
 			}
-			process.commit();
 
 			// open the "leaf file" generated by the TopDown test and verify that getParent() follows the same sequence
 			java.io.File lvs = new java.io.File(dbFile.getAbsolutePath() + java.io.File.separator + title);
diff --git a/bindings/java/test/org/sleuthkit/datamodel/CommunicationsManagerTest.java b/bindings/java/test/org/sleuthkit/datamodel/CommunicationsManagerTest.java
index a1c60a251ceffce30b9903dd899e13e18039f8b3..70ed724df9d64e4e6d59105b7cc8a5550a526c63 100644
--- a/bindings/java/test/org/sleuthkit/datamodel/CommunicationsManagerTest.java
+++ b/bindings/java/test/org/sleuthkit/datamodel/CommunicationsManagerTest.java
@@ -1408,8 +1408,7 @@ private static BlackboardArtifact addEmailMsgArtifact(String fromAddr, String to
 
 		try {
 			// Add Email artifact
-			bbart = abstractFile.newArtifact(TSK_EMAIL_MSG);
-			bbart.addAttributes(bbattributes);
+			bbart = abstractFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_EMAIL_MSG), bbattributes);
 
 			// Add account relationships
 			commsMgr.addRelationships(senderAccountInstance, recipientAccountInstances, bbart, MESSAGE, dateSent);
@@ -1463,17 +1462,20 @@ private static Set<String> findEmailAddresess(String input) {
 	private static void addCalllogArtifact(AccountFileInstance deviceAccount, String name, String phoneNumber, long date, long duration, String direction, AbstractFile abstractFile) {
 
 		try {
-			BlackboardArtifact bbart = abstractFile.newArtifact(TSK_CALLLOG); //create a call log and then add attributes from result set.
-			if (direction.equalsIgnoreCase("outgoing")) { //NON-NLS
-				bbart.addAttribute(new BlackboardAttribute(TSK_PHONE_NUMBER_TO, MODULE_NAME, phoneNumber));
-			} else { /// Covers INCOMING and MISSED
-				bbart.addAttribute(new BlackboardAttribute(TSK_PHONE_NUMBER_FROM, MODULE_NAME, phoneNumber));
-			}
-			bbart.addAttribute(new BlackboardAttribute(TSK_DATETIME_START, MODULE_NAME, date));
-			bbart.addAttribute(new BlackboardAttribute(TSK_DATETIME_END, MODULE_NAME, duration + date));
-			bbart.addAttribute(new BlackboardAttribute(TSK_DIRECTION, MODULE_NAME, direction));
-			bbart.addAttribute(new BlackboardAttribute(TSK_NAME, MODULE_NAME, name));
 
+			BlackboardAttribute attrPhonToOrFrom = direction.equalsIgnoreCase("outgoing")
+					? new BlackboardAttribute(TSK_PHONE_NUMBER_TO, MODULE_NAME, phoneNumber)
+					: new BlackboardAttribute(TSK_PHONE_NUMBER_FROM, MODULE_NAME, phoneNumber);
+
+			Collection<BlackboardAttribute> attributes = Arrays.asList(
+					attrPhonToOrFrom,
+					new BlackboardAttribute(TSK_DATETIME_START, MODULE_NAME, date),
+					new BlackboardAttribute(TSK_DATETIME_END, MODULE_NAME, duration + date),
+					new BlackboardAttribute(TSK_DIRECTION, MODULE_NAME, direction),
+					new BlackboardAttribute(TSK_NAME, MODULE_NAME, name)
+			);
+
+			BlackboardArtifact bbart = abstractFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG), attributes); //create a call log and then add attributes from result set.
 			// Create a phone number account for the phone number
 			AccountFileInstance phoneNumAccount = commsMgr.createAccountFileInstance(PHONE, phoneNumber, MODULE_NAME, abstractFile);
 			List<AccountFileInstance> accountInstanceList = new ArrayList<AccountFileInstance>();
@@ -1492,19 +1494,21 @@ private static void addCalllogArtifact(AccountFileInstance deviceAccount, String
 	private static void addMessageArtifact(AccountFileInstance deviceAccount, String phoneNumber, long date, String direction, String subject, String message, AbstractFile abstractFile) {
 
 		try {
-			BlackboardArtifact bbart = abstractFile.newArtifact(TSK_MESSAGE); //create Message artifact and then add attributes from result set.
-
-			if (direction.equalsIgnoreCase("incoming")) {
-				bbart.addAttribute(new BlackboardAttribute(TSK_PHONE_NUMBER_FROM, MODULE_NAME, phoneNumber));
-			} else {
-				bbart.addAttribute(new BlackboardAttribute(TSK_PHONE_NUMBER_TO, MODULE_NAME, phoneNumber));
-			}
+			BlackboardAttribute attrPhoneToOrFrom = (direction.equalsIgnoreCase("incoming"))
+					? new BlackboardAttribute(TSK_PHONE_NUMBER_FROM, MODULE_NAME, phoneNumber)
+					: new BlackboardAttribute(TSK_PHONE_NUMBER_TO, MODULE_NAME, phoneNumber);
+
+			Collection<BlackboardAttribute> attributes = Arrays.asList(
+					attrPhoneToOrFrom,
+					new BlackboardAttribute(TSK_DIRECTION, MODULE_NAME, direction),
+					new BlackboardAttribute(TSK_DATETIME, MODULE_NAME, date),
+					new BlackboardAttribute(TSK_SUBJECT, MODULE_NAME, subject),
+					new BlackboardAttribute(TSK_TEXT, MODULE_NAME, message),
+					new BlackboardAttribute(TSK_MESSAGE_TYPE, MODULE_NAME, "SMS")
+			);
 
-			bbart.addAttribute(new BlackboardAttribute(TSK_DIRECTION, MODULE_NAME, direction));
-			bbart.addAttribute(new BlackboardAttribute(TSK_DATETIME, MODULE_NAME, date));
-			bbart.addAttribute(new BlackboardAttribute(TSK_SUBJECT, MODULE_NAME, subject));
-			bbart.addAttribute(new BlackboardAttribute(TSK_TEXT, MODULE_NAME, message));
-			bbart.addAttribute(new BlackboardAttribute(TSK_MESSAGE_TYPE, MODULE_NAME, "SMS"));
+			//create Message artifact and then add attributes from result set.
+			BlackboardArtifact bbart = abstractFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_MESSAGE), attributes);
 
 			// Create a phone number account for the phone number
 			AccountFileInstance phoneNumAccount = commsMgr.createAccountFileInstance(PHONE, phoneNumber, MODULE_NAME, abstractFile);
@@ -1524,12 +1528,14 @@ private static void addMessageArtifact(AccountFileInstance deviceAccount, String
 	private static void addContactArtifact(AccountFileInstance deviceAccount, String name, String phoneNumber, String emailAddr, AbstractFile abstractFile) {
 
 		try {
-			BlackboardArtifact bbart = abstractFile.newArtifact(TSK_CONTACT); // create a CONTACT artifact
-
-			bbart.addAttribute(new BlackboardAttribute(TSK_NAME, MODULE_NAME, name));
+			Collection<BlackboardAttribute> attributes = Arrays.asList(
+					new BlackboardAttribute(TSK_NAME, MODULE_NAME, name),
+					new BlackboardAttribute(TSK_PHONE_NUMBER, MODULE_NAME, phoneNumber),
+					new BlackboardAttribute(TSK_EMAIL, MODULE_NAME, emailAddr)
+			);
 
-			bbart.addAttribute(new BlackboardAttribute(TSK_PHONE_NUMBER, MODULE_NAME, phoneNumber));
-			bbart.addAttribute(new BlackboardAttribute(TSK_EMAIL, MODULE_NAME, emailAddr));
+			// create a CONTACT artifact
+			BlackboardArtifact bbart = abstractFile.newDataArtifact(new BlackboardArtifact.Type(BlackboardArtifact.ARTIFACT_TYPE.TSK_CONTACT), attributes);
 
 			// Create a phone number account for the phone number
 			AccountFileInstance phoneNumAccount = commsMgr.createAccountFileInstance(PHONE, phoneNumber, MODULE_NAME, abstractFile);
diff --git a/bindings/java/test/org/sleuthkit/datamodel/DataModelTestSuite.java b/bindings/java/test/org/sleuthkit/datamodel/DataModelTestSuite.java
index 2ec2da48f63072d83262cf107fb7c1375888d298..34076f5a289ce4c739056c388a1124b4eb6d87fb 100644
--- a/bindings/java/test/org/sleuthkit/datamodel/DataModelTestSuite.java
+++ b/bindings/java/test/org/sleuthkit/datamodel/DataModelTestSuite.java
@@ -45,7 +45,11 @@
 @Suite.SuiteClasses({ 
 	CommunicationsManagerTest.class, 
 	CaseDbSchemaVersionNumberTest.class,
-
+	AttributeTest.class,
+	ArtifactTest.class,
+	OsAccountTest.class,
+	TimelineEventTypesTest.class,
+	
 //  Note: these tests have dependencies on images being placed in the input folder: nps-2009-canon2-gen6, ntfs1-gen, and small2	
 //	org.sleuthkit.datamodel.TopDownTraversal.class, 
 //	org.sleuthkit.datamodel.SequentialTraversal.class, 
@@ -132,12 +136,11 @@ public static void createOutput(String outputPath, String tempDirPath, List<Stri
 			String timezone = "";
 			SleuthkitJNI.CaseDbHandle.AddImageProcess process = sk.makeAddImageProcess(timezone, true, false, "");
 			try {
-				process.run(imagePaths.toArray(new String[imagePaths.size()]));
+				process.run("Data Source ID", imagePaths.toArray(new String[imagePaths.size()]));
 			} catch (TskDataException ex) {
 				inp.add(ex);
 			}
 			writeExceptions(standardFile.getAbsolutePath(), inp);
-			process.commit();
 
 			// dump the database based on the specific test testType
 			OutputStreamWriter standardWriter = testType.traverse(sk, standardFile.getAbsolutePath());
diff --git a/bindings/java/test/org/sleuthkit/datamodel/OsAccountTest.java b/bindings/java/test/org/sleuthkit/datamodel/OsAccountTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..7c14f3e124b799ad10e4d468b58166d0f3021974
--- /dev/null
+++ b/bindings/java/test/org/sleuthkit/datamodel/OsAccountTest.java
@@ -0,0 +1,979 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+import org.junit.After;
+import org.junit.AfterClass;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.assertFalse;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.sleuthkit.datamodel.OsAccount.OsAccountAttribute;
+import org.sleuthkit.datamodel.OsAccountManager.OsAccountUpdateResult;
+import org.sleuthkit.datamodel.OsAccountRealmManager.OsRealmUpdateResult;
+
+/**
+ *
+ * Tests the OsAccount APIs.
+ *
+ */
+public class OsAccountTest {
+	
+	private static final Logger LOGGER = Logger.getLogger(OsAccountTest.class.getName());
+
+	private static SleuthkitCase caseDB;
+
+	private final static String TEST_DB = "OsAccountApiTest.db";
+
+
+	private static String dbPath = null;
+	
+	private static Image image;
+	
+	private static FileSystem fs = null;
+
+	public OsAccountTest (){
+
+	}
+	
+	@BeforeClass
+	public static void setUpClass() {
+		String tempDirPath = System.getProperty("java.io.tmpdir");
+		try {
+			dbPath = Paths.get(tempDirPath, TEST_DB).toString();
+
+			// Delete the DB file, in case it exists from a previous run
+			java.io.File dbFile = new java.io.File(dbPath);
+			dbFile.delete();
+			if (dbFile.getParentFile() != null) {
+				dbFile.getParentFile().mkdirs();
+			}
+
+			// Create new case db
+			caseDB = SleuthkitCase.newCase(dbPath);
+
+			SleuthkitCase.CaseDbTransaction trans = caseDB.beginTransaction();
+
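+			// Add a placeholder image and file system so the tests below have content to attach files, accounts, and addresses to.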
+			image = caseDB.addImage(TskData.TSK_IMG_TYPE_ENUM.TSK_IMG_TYPE_DETECT, 512, 1024, "", Collections.emptyList(), "America/New_York", null, null, null, "first", trans);
+
+			fs = caseDB.addFileSystem(image.getId(), 0, TskData.TSK_FS_TYPE_ENUM.TSK_FS_TYPE_RAW, 0, 0, 0, 0, 0, "", trans);
+
+			trans.commit();
+
+
+			System.out.println("OsAccount Test DB created at: " + dbPath);
+		} catch (TskCoreException ex) {
+			LOGGER.log(Level.SEVERE, "Failed to create new case", ex);
+		}
+	}
+
+
+	@AfterClass
+	public static void tearDownClass() {
+
+	}
+	
+	@Before
+	public void setUp() {
+	}
+
+	@After
+	public void tearDown() {
+	}
+
+	@Test 
+	public void hostTests() throws TskCoreException {
+		
+		try {
+			String HOSTNAME1 = "host11";
+			
+			// Test: create a host
+			Host host1 = caseDB.getHostManager().newHost(HOSTNAME1);
+			assertEquals(host1.getName().equalsIgnoreCase(HOSTNAME1), true );
+			
+			
+			// Test: get a host we just created.
+			Optional<Host> optionalhost1 = caseDB.getHostManager().getHostByName(HOSTNAME1);
+			assertEquals(optionalhost1.isPresent(), true );
+			
+			
+			String HOSTNAME2 = "host22";
+			
+			// Get a host not yet created
+			Optional<Host> optionalhost2 = caseDB.getHostManager().getHostByName(HOSTNAME2);
+			assertEquals(optionalhost2.isPresent(), false );
+			
+			
+			// now create the second host
+			Host host2 = caseDB.getHostManager().newHost(HOSTNAME2);
+			assertEquals(host2.getName().equalsIgnoreCase(HOSTNAME2), true );
+			
+			
+			// now get it again, should be found this time
+			optionalhost2 = caseDB.getHostManager().getHostByName(HOSTNAME2);
+			assertEquals(optionalhost2.isPresent(), true);
+			
+			// create a host that already exists - should transparently return the existing host.
+			Host host2_2 = caseDB.getHostManager().newHost(HOSTNAME2);
+			assertEquals(host2_2.getName().equalsIgnoreCase(HOSTNAME2), true );
+			
+		}
+		catch(Exception ex) {
+			
+		}
+	
+	}
+	
+	@Test 
+	public void personTests() throws TskCoreException {
+		String personName1 = "John Doe";
+		String personName2 = "Jane Doe";
+		
+		org.sleuthkit.datamodel.PersonManager pm = caseDB.getPersonManager();
+		
+		Person p1 = pm.newPerson(personName1);
+		assertEquals(personName1.equals(p1.getName()), true);
+		
+		Optional<Person> p1opt = pm.getPerson(personName1.toLowerCase());
+		assertEquals(p1opt.isPresent(), true);
+		
+		p1.setName(personName2);
+		assertEquals(personName2.equals(p1.getName()), true);
+		
+		pm.updatePerson(p1);
+		Optional<Person> p2opt = pm.getPerson(personName2.toUpperCase());
+		assertEquals(p2opt.isPresent(), true);
+		
+		pm.deletePerson(p1.getName());
+		p2opt = pm.getPerson(personName2);
+		assertEquals(p2opt.isPresent(), false);
+	}
+		
+	@Test
+	public void mergeHostTests() throws TskCoreException, OsAccountManager.NotUserSIDException {
+		
+		// Host 1 will be merged into Host 2
+		String host1Name = "host1forHostMergeTest";
+		String host2Name = "host2forHostMergeTest";
+		Host host1 = caseDB.getHostManager().newHost(host1Name);
+		Host host2 = caseDB.getHostManager().newHost(host2Name);
+		
+		// Data source is originally associated with host1
+		org.sleuthkit.datamodel.SleuthkitCase.CaseDbTransaction trans = caseDB.beginTransaction();
+		DataSource ds = caseDB.addLocalFilesDataSource("devId", "pathToFiles", "EST", host1, trans);
+		trans.commit();
+        
+		String sid3 = "S-1-5-27-777777777-854245398-1060284298-7777";
+		String sid4 = "S-1-5-27-788888888-854245398-1060284298-8888";
+		String sid5 = "S-1-5-27-799999999-854245398-1060284298-9999";
+		String sid6 = "S-1-5-27-711111111-854245398-1060284298-1111";
+		String sid7 = "S-1-5-27-733333333-854245398-1060284298-3333";
+		String sid8 = "S-1-5-27-744444444-854245398-1060284298-4444";
+		
+		String realmName1 = "hostMergeRealm1";
+		String realmName2 = "hostMergeRealm2";
+		String realmName4 = "hostMergeRealm4";
+		String realmName5 = "hostMergeRealm5";
+		String realmName6 = "hostMergeRealm6";
+		String realmName7 = "hostMergeRealm7";
+		String realmName8 = "hostMergeRealm8";
+		
+		String realm8AcctName = "hostMergeUniqueRealm8Account";
+		String realm10AcctName = "hostMergeUniqueRealm10Account";
+		
+		// Save the created realms/accounts so we can query them later by object ID (the objects themselves will end up out-of-date)
+		OsAccountRealmManager realmManager = caseDB.getOsAccountRealmManager();
+		
+		// 1 - Should get moved
+		OsAccountRealm realm1 = realmManager.newWindowsRealm(null, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+		
+		// 2 - Should be merged into 5
+		OsAccountRealm realm2 = realmManager.newWindowsRealm(null, realmName2, host1, OsAccountRealm.RealmScope.LOCAL);
+		
+		// 3 - Should be merged into 5
+		OsAccountRealm realm3 = realmManager.newWindowsRealm(sid3, null, host1, OsAccountRealm.RealmScope.LOCAL); 
+		
+		// 4 - Should get moved - not merged into 6 since addrs are different
+		OsAccountRealm realm4 = realmManager.newWindowsRealm(sid4, realmName4, host1, OsAccountRealm.RealmScope.LOCAL); 
+
+		// 5 - 2 and 3 should get merged in
+		OsAccountRealm realm5 = realmManager.newWindowsRealm(sid3, realmName2, host2, OsAccountRealm.RealmScope.LOCAL);
+
+		// 6 - Should not get merged with 4
+		OsAccountRealm realm6 = realmManager.newWindowsRealm(sid5, realmName4, host2, OsAccountRealm.RealmScope.LOCAL);
+
+		// 7 - Should be unchanged
+		OsAccountRealm realm7 = realmManager.newWindowsRealm(null, realmName5, host2, OsAccountRealm.RealmScope.LOCAL);
+
+		// 8, 9, 10 - 8 should be merged into 9 and then 10 should be merged into 9
+		OsAccountRealm realm8 = realmManager.newWindowsRealm(null, realmName6, host2, OsAccountRealm.RealmScope.LOCAL); 
+		OsAccount realm8acct = caseDB.getOsAccountManager().newWindowsOsAccount(null, realm8AcctName, realmName6, host2, OsAccountRealm.RealmScope.LOCAL);
+		OsAccountRealm realm9 = realmManager.newWindowsRealm(sid6, null, host2, OsAccountRealm.RealmScope.LOCAL);
+		OsAccountRealm realm10 = realmManager.newWindowsRealm(sid6, realmName6, host1, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount realm10acct = caseDB.getOsAccountManager().newWindowsOsAccount(null, realm10AcctName, realmName6, host1, OsAccountRealm.RealmScope.LOCAL);
+
+		// 11, 12 - 11 should get merged into 12, adding the addr "sid8" to 12
+		OsAccountRealm realm11 = realmManager.newWindowsRealm(sid8, realmName7, host1, OsAccountRealm.RealmScope.LOCAL);
+		OsAccountRealm realm12 = realmManager.newWindowsRealm(null, realmName7, host2, OsAccountRealm.RealmScope.LOCAL);
+
+		// 13,14 - 13 should get merged into 14, name for 14 should not change
+		OsAccountRealm realm13 = realmManager.newWindowsRealm(sid7, "notRealm8", host1, OsAccountRealm.RealmScope.LOCAL);
+		OsAccountRealm realm14 = realmManager.newWindowsRealm(sid7, realmName8, host2, OsAccountRealm.RealmScope.LOCAL);
+		
+		// Do the merge
+		caseDB.getHostManager().mergeHosts(host1, host2);
+		
+		// Test the realms
+		try (org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection connection = caseDB.getConnection()) {
+			// Expected change: host is now host2
+			testUpdatedRealm(realm1, OsAccountRealm.RealmDbStatus.ACTIVE, realm1.getRealmAddr(), realm1.getRealmNames(), Optional.of(host2), connection);
+			
+			// Expected change: should be marked as merged
+			testUpdatedRealm(realm2, OsAccountRealm.RealmDbStatus.MERGED, null, null, null, connection);
+			
+			// Expected change: should be marked as merged
+			testUpdatedRealm(realm3, OsAccountRealm.RealmDbStatus.MERGED, null, null, null, connection);
+			
+			// Expected change: should still be active and be moved to host2
+			testUpdatedRealm(realm4, OsAccountRealm.RealmDbStatus.ACTIVE, realm4.getRealmAddr(), realm4.getRealmNames(), Optional.of(host2), connection);
+			
+			// Expected change: nothing
+			testUpdatedRealm(realm7, realm7.getDbStatus(), realm7.getRealmAddr(), realm7.getRealmNames(), realm7.getScopeHost(), connection);
+			
+			// Expected change: should be marked as merged
+			testUpdatedRealm(realm8, OsAccountRealm.RealmDbStatus.MERGED, null, null, null, connection);
+			
+			// Expected change: should have gained the name of realm 8
+			testUpdatedRealm(realm9, OsAccountRealm.RealmDbStatus.ACTIVE, realm9.getRealmAddr(), realm8.getRealmNames(), realm9.getScopeHost(), connection);
+			
+			// Expected change: should have gained the addr of realm 11
+			testUpdatedRealm(realm12, OsAccountRealm.RealmDbStatus.ACTIVE, realm11.getRealmAddr(), realm12.getRealmNames(), Optional.of(host2), connection);
+			
+			// "notRealm8" should not return any hits for either host (realm13 is marked as merged and the name was not copied to realm14)
+			Optional<OsAccountRealm> optRealm = realmManager.getRealmByName("notRealm8", host1, connection);
+			assertEquals(optRealm.isPresent(), false);
+			optRealm = realmManager.getRealmByName("notRealm8", host2, connection);
+			assertEquals(optRealm.isPresent(), false);
+			
+			// The realm8 and realm10 accounts should both be in realm9 now
+			OsAccount acct = caseDB.getOsAccountManager().getOsAccountByObjectId(realm8acct.getId(), connection);
+			assertEquals(acct.getRealmId() == realm9.getRealmId(), true);
+			acct = caseDB.getOsAccountManager().getOsAccountByObjectId(realm10acct.getId(), connection);
+			assertEquals(acct.getRealmId() == realm9.getRealmId(), true);
+		}
+			
+		// The data source should now reference host2
+		Host host = caseDB.getHostManager().getHostByDataSource(ds);
+		assertEquals(host.getHostId() == host2.getHostId(), true);
+
+		// We should get no results on a search for host1
+		Optional<Host> optHost = caseDB.getHostManager().getHostByName(host1Name);
+		assertEquals(optHost.isPresent(), false);
+		
+		// If we attempt to make a new host with the same name host1 had, we should get a new object Id
+		host = caseDB.getHostManager().newHost(host1Name);
+		assertEquals(host.getHostId() != host1.getHostId(), true);
+	}
+	
+	/**
+	 * Retrieve the new version of a realm from the database and compare with expected values.
+	 * Addr, name, and host can be passed in as null to skip comparison.
+	 */
+	private void testUpdatedRealm(OsAccountRealm origRealm, OsAccountRealm.RealmDbStatus expectedStatus, Optional<String> expectedAddr,
+			List<String> expectedNames, Optional<Host> expectedHost, org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection connection) throws TskCoreException {
+		
+		OsAccountRealm realm = caseDB.getOsAccountRealmManager().getRealmByRealmId(origRealm.getRealmId(), connection);
+		assertEquals(realm.getDbStatus().equals(expectedStatus), true);	
+		if (expectedAddr != null) {
+			assertEquals(realm.getRealmAddr().equals(expectedAddr), true);
+		}
+		if(expectedNames != null && !expectedNames.isEmpty()){
+			assertEquals(realm.getRealmNames().get(0).equals(expectedNames.get(0)), true);
+		}
+		if (expectedHost != null) {
+			assertEquals(realm.getScopeHost().equals(expectedHost), true);
+		}
+	}
+	
+	
+	@Test 
+	public void mergeRealmsTests() throws TskCoreException, OsAccountManager.NotUserSIDException {
+		Host host = caseDB.getHostManager().newHost("mergeTestHost");
+		
+		String destRealmName = "mergeTestDestRealm";
+		String srcRealmName = "mergeTestSourceRealm";
+		
+		String sid1 = "S-1-5-21-222222222-222222222-1060284298-2222";
+		String sid2 = "S-1-5-21-555555555-555555555-1060284298-5555";
+		
+		String uniqueRealm2Name = "uniqueRealm2Account";
+		String matchingName = "matchingNameAccount";
+		String fullName1 = "FullName1";
+		long creationTime1 = 555;
+		
+		OsAccountRealm srcRealm = caseDB.getOsAccountRealmManager().newWindowsRealm(null, srcRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		OsAccountRealm destRealm = caseDB.getOsAccountRealmManager().newWindowsRealm(null, destRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		
+		OsAccount account1 = caseDB.getOsAccountManager().newWindowsOsAccount(null, "uniqueRealm1Account", destRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount account2 = caseDB.getOsAccountManager().newWindowsOsAccount(null, matchingName, destRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount account3 = caseDB.getOsAccountManager().newWindowsOsAccount(null, uniqueRealm2Name, srcRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount account4 = caseDB.getOsAccountManager().newWindowsOsAccount(null, matchingName, srcRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		
+		
+		OsAccountUpdateResult updateResult =  caseDB.getOsAccountManager().updateStandardOsAccountAttributes(account4, fullName1, null, null, creationTime1);
+		assertEquals(updateResult.getUpdateStatusCode(), OsAccountManager.OsAccountUpdateStatus.UPDATED);
+		assertEquals(updateResult.getUpdatedAccount().isPresent(), true);
+		account4 = updateResult.getUpdatedAccount().orElseThrow(() ->  new TskCoreException("Updated account not found."));
+		
+		
+		OsAccount account5 = caseDB.getOsAccountManager().newWindowsOsAccount(sid1, null, destRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount account6 = caseDB.getOsAccountManager().newWindowsOsAccount(sid1, null, srcRealmName, host, OsAccountRealm.RealmScope.LOCAL);  
+		OsAccount account7 = caseDB.getOsAccountManager().newWindowsOsAccount(sid2, null, destRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount account8 = caseDB.getOsAccountManager().newWindowsOsAccount(null, "nameForCombining", destRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount account9 = caseDB.getOsAccountManager().newWindowsOsAccount(sid2, "nameForCombining", srcRealmName, host, OsAccountRealm.RealmScope.LOCAL);
+		
+		// Test that we can currently get the source realm by name
+		Optional<OsAccountRealm> optRealm = caseDB.getOsAccountRealmManager().getWindowsRealm(null, srcRealmName, host);
+		assertEquals(optRealm.isPresent(), true);
+		
+		// Test that there is only one account associated with sid1
+		List<OsAccount> accounts = caseDB.getOsAccountManager().getOsAccounts().stream().filter(p -> p.getAddr().isPresent() && p.getAddr().get().equals(sid1)).collect(Collectors.toList());
+		assertEquals(accounts.size() == 1, true);
+		
+		// Expected results of the merge:
+		// - account 4 will be merged into account 2 (and extra fields should be copied)
+		// - account 6 will be merged into account 5
+		// - account 8 will be merged into account 7 (due to account 9 containing matches for both)
+		// - account 9 will be merged into account 7
+		SleuthkitCase.CaseDbTransaction trans = caseDB.beginTransaction();
+		caseDB.getOsAccountRealmManager().mergeRealms(srcRealm, destRealm, trans);
+		trans.commit();
+		
+		// Test that the source realm is no longer returned by a search by name
+		optRealm = caseDB.getOsAccountRealmManager().getWindowsRealm(null, srcRealmName, host);
+		assertEquals(optRealm.isPresent(), false);
+		
+		// Test that there is now only one account associated with sid1
+		accounts = caseDB.getOsAccountManager().getOsAccounts().stream().filter(p -> p.getAddr().isPresent() && p.getAddr().get().equals(sid1)).collect(Collectors.toList());
+		assertEquals(accounts.size() == 1, true);
+		
+		// Test that account 3 got moved into the destination realm
+		Optional<OsAccount> optAcct = caseDB.getOsAccountManager().getOsAccountByLoginName(uniqueRealm2Name, destRealm);
+		assertEquals(optAcct.isPresent(), true);
+		
+		// Test that data from account 4 was merged into account 2
+		optAcct = caseDB.getOsAccountManager().getOsAccountByLoginName(matchingName, destRealm);
+		assertEquals(optAcct.isPresent(), true);
+		if (optAcct.isPresent()) {
+			assertEquals(optAcct.get().getCreationTime().isPresent() &&  optAcct.get().getCreationTime().get() == creationTime1, true);
+			assertEquals(optAcct.get().getFullName().isPresent() && fullName1.equalsIgnoreCase(optAcct.get().getFullName().get()), true);
+		}
+	}
+	
+	@Test 
+	public void hostAddressTests() throws TskCoreException {
+		
+		
+		// let's add some files to the file system
+		long dataSourceObjectId = fs.getDataSource().getId();
+		
+		SleuthkitCase.CaseDbTransaction trans = caseDB.beginTransaction();
+		
+		// Add a root folder
+		FsContent _root = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, null, null, null, false, fs, null, null, Collections.emptyList(), trans);
+
+		// Add a dir - no attributes 
+		FsContent _windows = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "Windows", 0, 0,
+				TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+				(short) 0, 200, 0, 0, 0, 0, null, null, null, false, _root, "S-1-5-80-956008885-3418522649-1831038044-1853292631-227147846", null, Collections.emptyList(), trans);
+
+		// add another file, with no attributes, under the Windows folder
+		FsContent _abcTextFile = caseDB.addFileSystemFile(dataSourceObjectId, fs.getId(), "abc.txt", 0, 0,
+					TskData.TSK_FS_ATTR_TYPE_ENUM.TSK_FS_ATTR_TYPE_DEFAULT, 0, TskData.TSK_FS_NAME_FLAG_ENUM.ALLOC,
+					(short) 0, 200, 0, 0, 0, 0, null, null, "Text/Plain", true, _windows, null, null, Collections.emptyList(), trans);
+		
+		trans.commit();
+			
+		
+		
+		String ipv4Str = "11.22.33.44";
+		String ipv6Str = "2001:0db8:85a3:0000:0000:8a2e:0370:6666";
+		String hostnameStr = "basis.com";
+		
+		// Test creation
+		HostAddress ipv4addr = caseDB.getHostAddressManager().newHostAddress(HostAddress.HostAddressType.IPV4, ipv4Str);
+		assertEquals(ipv4addr.getAddress().equalsIgnoreCase(ipv4Str), true);
+		
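+		// DNS_AUTO is expected to infer the concrete address type (IPV6 vs. HOSTNAME) from the string passed in.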
+		HostAddress addr2 = caseDB.getHostAddressManager().newHostAddress(HostAddress.HostAddressType.DNS_AUTO, ipv6Str);
+		assertEquals(addr2.getAddress().equalsIgnoreCase(ipv6Str), true);
+		assertEquals(HostAddress.HostAddressType.IPV6.equals(addr2.getAddressType()), true);
+		
+		HostAddress hostAddr = caseDB.getHostAddressManager().newHostAddress(HostAddress.HostAddressType.DNS_AUTO, hostnameStr);
+		assertEquals(hostAddr.getAddress().equalsIgnoreCase(hostnameStr), true);
+		assertEquals(HostAddress.HostAddressType.HOSTNAME.equals(hostAddr.getAddressType()), true);
+		
+		// Test get
+		Optional<HostAddress> addr4opt = caseDB.getHostAddressManager().getHostAddress(HostAddress.HostAddressType.IPV4, ipv4Str);
+		assertEquals(addr4opt.isPresent(), true);
+		
+		// Test host map
+		Host host = caseDB.getHostManager().newHost("TestHostAddress");
+		
+		trans = caseDB.beginTransaction();
+		DataSource ds = caseDB.addLocalFilesDataSource("devId", "pathToFiles", "EST", null, trans);
+		trans.commit();
+		
+		caseDB.getHostAddressManager().assignHostToAddress(host, ipv4addr, (long) 0, ds);
+		List<HostAddress> hostAddrs = caseDB.getHostAddressManager().getHostAddressesAssignedTo(host);
+		assertEquals(hostAddrs.size() == 1, true);
+		
+		// Test IP mapping
+		caseDB.getHostAddressManager().addHostNameAndIpMapping(hostAddr, ipv4addr, (long) 0, ds);
+		List<HostAddress> ipForHostSet = caseDB.getHostAddressManager().getIpAddress(hostAddr.getAddress());
+		assertEquals(ipForHostSet.size() == 1, true);
+		List<HostAddress> hostForIpSet = caseDB.getHostAddressManager().getHostNameByIp(ipv4addr.getAddress());
+		assertEquals(hostForIpSet.size() == 1, true);
+		
+		
+		// add address usage
+		caseDB.getHostAddressManager().addUsage(_abcTextFile, ipv4addr);
+		caseDB.getHostAddressManager().addUsage(_abcTextFile, addr2);
+		caseDB.getHostAddressManager().addUsage(_abcTextFile, hostAddr);
+		
+		// test the get-addresses-used methods
+		List<HostAddress> addrUsedByAbc = caseDB.getHostAddressManager().getHostAddressesUsedByContent(_abcTextFile);
+		assertEquals(addrUsedByAbc.size() == 3, true);
+		
+		List<HostAddress> addrUsedByRoot = caseDB.getHostAddressManager().getHostAddressesUsedByContent(_root);
+		assertEquals(addrUsedByRoot.isEmpty(), true);
+		
+		List<HostAddress> addrUsedOnDataSource = caseDB.getHostAddressManager().getHostAddressesUsedOnDataSource(_root.getDataSource());
+		assertEquals(addrUsedOnDataSource.size() == 3, true);
+		
+	}
+	
+	@Test
+	public void osAccountRealmTests() throws TskCoreException, OsAccountManager.NotUserSIDException {
+		
+		try {
+		// TEST: create a DOMAIN realm 
+		
+		String HOSTNAME1 = "host1";
+		Host host1 = caseDB.getHostManager().newHost(HOSTNAME1);
+			
+		String realmName1 = "basis";
+		String realmSID1 =  "S-1-5-21-1111111111-2222222222-3333333333";
+		String realmAddr1 = "S-1-5-21-1111111111-2222222222";	
+		
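+		// The realm addr is expected to be the SID with the trailing relative identifier (RID) removed.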
+		OsAccountRealm domainRealm1 = caseDB.getOsAccountRealmManager().newWindowsRealm(realmSID1, realmName1, host1, OsAccountRealm.RealmScope.DOMAIN);
+		
+		assertEquals(domainRealm1.getRealmNames().get(0).equalsIgnoreCase(realmName1), true );
+		assertEquals(domainRealm1.getScopeConfidence(), OsAccountRealm.ScopeConfidence.KNOWN);
+		assertEquals(domainRealm1.getRealmAddr().orElse(null), realmAddr1); 
+		
+	
+		//TEST: create a new LOCAL realm with a single host
+		String realmSID2 = "S-1-5-18-2033736216-1234567890-5432109876";
+		String realmAddr2 = "S-1-5-18-2033736216-1234567890";	
+		String realmName2 = "win-raman-abcd";
+		String hostName2 = "host2";
+		
+		Host host2 = caseDB.getHostManager().newHost(hostName2);
+		OsAccountRealm localRealm2 = caseDB.getOsAccountRealmManager().newWindowsRealm(realmSID2, null, host2, OsAccountRealm.RealmScope.LOCAL);
+		assertEquals(localRealm2.getRealmAddr().orElse("").equalsIgnoreCase(realmAddr2), true );
+		assertEquals(localRealm2.getScopeHost().orElse(null).getName().equalsIgnoreCase(hostName2), true);
+		
+		// update the realm name on an existing realm.
+		OsRealmUpdateResult realmUpdateResult = caseDB.getOsAccountRealmManager().updateRealm(localRealm2, null, realmName2 );
+		assertEquals(realmUpdateResult.getUpdateStatus(), OsAccountRealmManager.OsRealmUpdateStatus.UPDATED );
+		assertTrue(realmUpdateResult.getUpdatedRealm().isPresent());
+		
+		OsAccountRealm updatedRealm2 = realmUpdateResult.getUpdatedRealm().get();
+		assertTrue(updatedRealm2.getRealmAddr().orElse("").equalsIgnoreCase(realmAddr2));
+		assertTrue(updatedRealm2.getRealmNames().get(0).equalsIgnoreCase(realmName2));
+		
+		
+		
+		// TEST get an existing DOMAIN realm - new SID  on a new host but same sub authority as previously created realm
+		String realmSID3 = realmAddr1 + "-88888888";
+		
+		String hostName3 = "host3";
+		Host host3 = caseDB.getHostManager().newHost(hostName3);
+		
+		// expect this to return realm1
+		Optional<OsAccountRealm> existingRealm3 = caseDB.getOsAccountRealmManager().getWindowsRealm(realmSID3, null, host3); 
+		assertEquals(existingRealm3.isPresent(), true);
+		assertEquals(existingRealm3.get().getRealmAddr().orElse("").equalsIgnoreCase(realmAddr1), true );
+		assertEquals(existingRealm3.get().getRealmNames().get(0).equalsIgnoreCase(realmName1), true );
+		
+		
+		// TEST: get an existing LOCAL realm by addr, BUT on a new referring host.
+		String hostName4 = "host4";
+		Host host4 = caseDB.getHostManager().newHost(hostName4);
+		
+		// Although the realm exists with this addr, it should NOT match since the host is different from what the realm was created with
+		Optional<OsAccountRealm> realm4 = caseDB.getOsAccountRealmManager().getWindowsRealm(realmSID2, null, host4);
+		
+		assertEquals(realm4.isPresent(), false);
+				
+		}
+		finally {
+
+		}
+		
+		
+	}
+	
+	@Test
+	public void basicOsAccountTests() throws TskCoreException, OsAccountManager.NotUserSIDException {
+
+		try {
+			//String ownerUid1 = "S-1-5-32-544"; // special short SIDS not handled yet
+			
+			// Create an account in a local scoped realm.
+			
+			String ownerUid1 = "S-1-5-21-111111111-222222222-3333333333-1001";
+			String loginName1 = "jay";
+			String realmName1 = "local";
+			
+			String hostname1 = "host1";
+			Host host1 = caseDB.getHostManager().newHost(hostname1);
+			
+			//OsAccountRealm localRealm1 = caseDB.getOsAccountRealmManager().newWindowsRealm(ownerUid1, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+			OsAccount osAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid1, loginName1, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+			
+			assertEquals(osAccount1.getAddr().orElse("").equalsIgnoreCase(ownerUid1), true);
+			assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount1.getRealmId()).getRealmNames().get(0).equalsIgnoreCase(realmName1), true);
+			
+			// Create another account - with same SID on the same host - should return the existing account
+			String loginName11 = "BlueJay";
+			String realmName11 = "DESKTOP-9TO5";
+			OsAccount osAccount11 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid1, loginName11, realmName11, host1, OsAccountRealm.RealmScope.DOMAIN);
+			
+			// account should be the same as osAccount1
+			assertEquals(osAccount11.getAddr().orElse("").equalsIgnoreCase(ownerUid1), true);	
+			assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount11.getRealmId()).getRealmNames().get(0).equalsIgnoreCase(realmName1), true);
+			assertEquals(osAccount11.getLoginName().orElse("").equalsIgnoreCase(loginName1), true);	
+			
+			
+			// Let's update osAccount1
+			String fullName1 = "Johnny Depp";
+			Long creationTime1 = 1611858618L;
+			
+			
+			OsAccountUpdateResult updateResult = caseDB.getOsAccountManager().updateStandardOsAccountAttributes(osAccount1, fullName1, null, null, creationTime1 );
+			assertEquals(updateResult.getUpdateStatusCode(), OsAccountManager.OsAccountUpdateStatus.UPDATED);
+			assertTrue(updateResult.getUpdatedAccount().isPresent());
+			
+			osAccount1 = updateResult.getUpdatedAccount().orElseThrow(() -> new TskCoreException("Updated account not found"));
+			assertEquals(osAccount1.getCreationTime().orElse(null), creationTime1);
+			assertEquals(osAccount1.getFullName().orElse(null).equalsIgnoreCase(fullName1), true );
+			
+			
+			// now try and create osAccount1 again - it should return the existing account
+			OsAccount osAccount1_copy1 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid1, null, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+			
+			
+			assertEquals(osAccount1_copy1.getAddr().orElse("").equalsIgnoreCase(ownerUid1), true);
+			assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount1_copy1.getRealmId()).getRealmNames().get(0).equalsIgnoreCase(realmName1), true);
+			
+			
+			assertEquals(osAccount1_copy1.getFullName().orElse("").equalsIgnoreCase(fullName1), true);
+			assertEquals(osAccount1.getCreationTime().orElse(null), creationTime1);
+			
+			
+			// Test that getContentById() returns the same account
+			Content content = caseDB.getContentById(osAccount1.getId());
+			assertEquals(content != null, true);
+			assertEquals(content instanceof OsAccount, true);
+			OsAccount osAccount1_copy2 = (OsAccount) content;
+			assertEquals(osAccount1_copy2.getAddr().orElse("").equalsIgnoreCase(ownerUid1), true);
+			
+			
+			
+			// Create two new accounts on a new domain realm
+			String ownerUid2 = "S-1-5-21-725345543-854245398-1060284298-1003";
+			String ownerUid3 = "S-1-5-21-725345543-854245398-1060284298-1004";
+	
+			String realmName2 = "basis";
+			
+			String hostname2 = "host2";
+			String hostname3 = "host3";
+			Host host2 = caseDB.getHostManager().newHost(hostname2);
+			Host host3 = caseDB.getHostManager().newHost(hostname3);
+		
+			OsAccountRealm domainRealm1 = caseDB.getOsAccountRealmManager().newWindowsRealm(ownerUid2, realmName2, host2, OsAccountRealm.RealmScope.DOMAIN);
+		
+			// create accounts in this domain scoped realm
+			OsAccount osAccount2 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid2, null, realmName2, host2, OsAccountRealm.RealmScope.DOMAIN);
+			OsAccount osAccount3 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid3, null, realmName2, host3, OsAccountRealm.RealmScope.DOMAIN);
+			
+			assertEquals(osAccount2.getAddr().orElse("").equalsIgnoreCase(ownerUid2), true);
+			assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount2.getRealmId()).getRealmNames().get(0).equalsIgnoreCase(realmName2), true);
+			
+			
+			assertEquals(osAccount3.getAddr().orElse("").equalsIgnoreCase(ownerUid3), true);
+			assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount3.getRealmId()).getRealmNames().get(0).equalsIgnoreCase(realmName2), true);
+			
+		}
+		
+		finally {
+			
+		}
+
+	}
+	
+	
+	@Test
+	public void windowsSpecialAccountTests() throws TskCoreException, OsAccountManager.NotUserSIDException {
+
+		try {
+			
+			String SPECIAL_WINDOWS_REALM_ADDR = "SPECIAL_WINDOWS_ACCOUNTS";
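+			// Accounts with well-known/special Windows SIDs (e.g. S-1-5-18/19/20 and service SIDs) are expected to be grouped into a realm with this address, scoped to the host they were seen on.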
+			
+			
+			// TEST create accounts with special SIDs on host2
+			{
+				String hostname2 = "host222";
+				Host host2 = caseDB.getHostManager().newHost(hostname2);
+
+				String specialSid1 = "S-1-5-18";
+				String specialSid2 = "S-1-5-19";
+				String specialSid3 = "S-1-5-20";
+
+				OsAccount specialAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid1, null, null, host2, OsAccountRealm.RealmScope.UNKNOWN);
+				OsAccount specialAccount2 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid2, null, null, host2, OsAccountRealm.RealmScope.UNKNOWN);
+				OsAccount specialAccount3 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid3, null, null, host2, OsAccountRealm.RealmScope.UNKNOWN);
+
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount1.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount2.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount3.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+			}
+			
+			
+			// TEST create accounts with special SIDs on host3 - should create their own realm 
+			{
+				String hostname3 = "host333";
+				Host host3 = caseDB.getHostManager().newHost(hostname3);
+
+				String specialSid1 = "S-1-5-18";
+				String specialSid2 = "S-1-5-19";
+				String specialSid3 = "S-1-5-20";
+
+				OsAccount specialAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid1, null, null, host3, OsAccountRealm.RealmScope.UNKNOWN);
+				OsAccount specialAccount2 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid2, null, null, host3, OsAccountRealm.RealmScope.UNKNOWN);
+				OsAccount specialAccount3 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid3, null, null, host3, OsAccountRealm.RealmScope.UNKNOWN);
+
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount1.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount2.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount3.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				
+				// verify a new local realm scoped to host3 was created for these accounts even though they've been seen previously on another host
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount1.getRealmId()).getScopeHost().orElse(null).getName().equalsIgnoreCase(hostname3), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount2.getRealmId()).getScopeHost().orElse(null).getName().equalsIgnoreCase(hostname3), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount3.getRealmId()).getScopeHost().orElse(null).getName().equalsIgnoreCase(hostname3), true);
+			}
+
+			
+			// Test some other special accounts.
+			{
+				String hostname4 = "host444";
+				Host host4 = caseDB.getHostManager().newHost(hostname4);
+
+				String specialSid1 = "S-1-5-80-3696737894-3623014651-202832235-645492566-13622391";
+				String specialSid2 = "S-1-5-82-4003674586-223046494-4022293810-2417516693-151509167";
+				String specialSid3 = "S-1-5-90-0-2";
+				String specialSid4 = "S-1-5-96-0-3";
+				
+
+				OsAccount specialAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid1, null, null, host4, OsAccountRealm.RealmScope.UNKNOWN);
+				OsAccount specialAccount2 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid2, null, null, host4, OsAccountRealm.RealmScope.UNKNOWN);
+				OsAccount specialAccount3 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid3, null, null, host4, OsAccountRealm.RealmScope.UNKNOWN);
+				OsAccount specialAccount4 = caseDB.getOsAccountManager().newWindowsOsAccount(specialSid4, null, null, host4, OsAccountRealm.RealmScope.UNKNOWN);
+				
+
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount1.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount2.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount3.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				assertEquals(caseDB.getOsAccountRealmManager().getRealmByRealmId(specialAccount4.getRealmId()).getRealmAddr().orElse("").equalsIgnoreCase(SPECIAL_WINDOWS_REALM_ADDR), true);
+				
+				
+			}
+			
+			// TEST: create accounts with invalid user SIDs - these should generate exceptions
+			{
+				String hostname5 = "host555";
+				String realmName5 = "realmName555";
+				Host host5 = caseDB.getHostManager().newHost(hostname5);
+
+				try {
+					String sid1 = "S-1-5-32-544"; // builtin Administrators
+					OsAccount osAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(sid1, null, realmName5, host5, OsAccountRealm.RealmScope.UNKNOWN);
+					
+					// above should raise an exception
+					assertEquals(true, false);
+				}
+				catch (OsAccountManager.NotUserSIDException ex) {
+					// continue
+				}
+				
+				try {
+					String sid2 = "S-1-5-21-725345543-854245398-1060284298-512"; //  domain admins group
+					OsAccount osAccount2 = caseDB.getOsAccountManager().newWindowsOsAccount(sid2, null, realmName5, host5, OsAccountRealm.RealmScope.UNKNOWN);
+					
+					// above should raise an exception
+					assertEquals(true, false);
+				}
+				catch (OsAccountManager.NotUserSIDException ex) {
+					// continue
+				}
+				
+				try {
+					String sid3 = "S-1-1-0"; //  Everyone
+					OsAccount osAccount3 = caseDB.getOsAccountManager().newWindowsOsAccount(sid3, null, realmName5, host5, OsAccountRealm.RealmScope.UNKNOWN);
+					
+					// above should raise an exception
+					assertEquals(true, false);
+				}
+				catch (OsAccountManager.NotUserSIDException ex) {
+					// continue
+				}
+
+			}
+		}
+		
+		finally {
+			
+		}
+
+	}
+	
+	
+	@Test
+	public void osAccountInstanceTests() throws TskCoreException, OsAccountManager.NotUserSIDException {
+
+		String ownerUid1 = "S-1-5-21-111111111-222222222-3333333333-0001";
+		String realmName1 = "realm1111";
+
+		String hostname1 = "host1111";
+		Host host1 = caseDB.getHostManager().newHost(hostname1);
+
+		OsAccountRealm localRealm1 = caseDB.getOsAccountRealmManager().newWindowsRealm(ownerUid1, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+		OsAccount osAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid1, null, realmName1, host1, OsAccountRealm.RealmScope.LOCAL);
+
+		// Test: add an instance
+		caseDB.getOsAccountManager().newOsAccountInstance(osAccount1, image, OsAccountInstance.OsAccountInstanceType.LAUNCHED);
+
+		// Test: add an existing instance - should be a no-op.
+		caseDB.getOsAccountManager().newOsAccountInstance(osAccount1, image, OsAccountInstance.OsAccountInstanceType.LAUNCHED);
+
+		// Test: create account instance on a new host
+		String hostname2 = "host2222";
+		Host host2 = caseDB.getHostManager().newHost(hostname2);
+		caseDB.getOsAccountManager().newOsAccountInstance(osAccount1, image, OsAccountInstance.OsAccountInstanceType.LAUNCHED);
+	
+		
+		List<OsAccountAttribute> accountAttributes = new ArrayList<>();
+		Long resetTime1 = 1611859999L;	
+		
+		// TBD: perhaps add some files to the case and then use one of the files as the source of attributes.
+		
+		OsAccountAttribute attrib1 = osAccount1.new OsAccountAttribute(caseDB.getAttributeType(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_PASSWORD_RESET.getTypeID()), resetTime1, osAccount1, null, image);
+		accountAttributes.add(attrib1);
+		
+		String hint = "HINT";
+		OsAccountAttribute attrib2 = osAccount1.new OsAccountAttribute(caseDB.getAttributeType(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PASSWORD_HINT.getTypeID()), hint, osAccount1, host2, image);
+		accountAttributes.add(attrib2);
+		
+		// add attributes to account.
+		caseDB.getOsAccountManager().addExtendedOsAccountAttributes(osAccount1, accountAttributes);
+		
+		// now get the account with the same SID, and get its attributes and verify.
+		Optional<OsAccount> existingAccount1 = caseDB.getOsAccountManager().getOsAccountByAddr(osAccount1.getAddr().get(), caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount1.getRealmId()));
+		List<OsAccountAttribute> existingAccountAttribs  = existingAccount1.get().getExtendedOsAccountAttributes();
+		
+		
+		assertEquals(existingAccountAttribs.size(), 2);
+		for (OsAccountAttribute attr: existingAccountAttribs) {
+			if (attr.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_PASSWORD_RESET.getTypeID()) {
+				assertEquals(attr.getValueLong(), resetTime1.longValue() );
+				
+			} else if (attr.getAttributeType().getTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PASSWORD_HINT.getTypeID()) {
+				assertEquals(attr.getValueString().equalsIgnoreCase(hint), true );
+			}
+			
+		}
+		
+		
+	}
+	
+	
+	@Test
+	public void windowsAccountRealmUpdateTests() throws TskCoreException, OsAccountManager.NotUserSIDException {
+
+		String ownerUid1 = "S-1-5-21-111111111-222222222-4444444444-0001";
+		//String realmName1 = "realm4444";
+
+		String hostname1 = "host4444";
+		Host host1 = caseDB.getHostManager().newHost(hostname1);
+
+		
+		// create an account, a realm should be created implicitly with just the SID, and no name
+		
+		OsAccount osAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid1, null, null, host1, OsAccountRealm.RealmScope.LOCAL);
+		
+		String realmAddr1 = "S-1-5-21-111111111-222222222-4444444444";
+		OsAccountRealm realm1 = caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount1.getRealmId());
+		assertEquals(realm1.getRealmAddr().orElse("").equalsIgnoreCase(realmAddr1), true );
+		assertEquals(realm1.getRealmNames().isEmpty(), true);	// no name was given, so the realm should have no names yet
+		
+		
+		
+		// create a 2nd account with the same realmaddr, along with a known realm name
+		String ownerUid2 = "S-1-5-21-111111111-222222222-4444444444-0002";
+		
+		String realmName2 = "realm4444";
+		OsAccount osAccount2 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid2, null, realmName2, host1, OsAccountRealm.RealmScope.LOCAL);
+		
+		// Account 2 should have the same realm by addr, but its realm name should now get updated.
+		OsAccountRealm realm2 = caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount2.getRealmId());
+		
+		assertEquals(osAccount1.getRealmId(), osAccount2.getRealmId() );
+		assertEquals(realm2.getRealmAddr().orElse("").equalsIgnoreCase(realmAddr1), true );
+		assertEquals(realm2.getRealmNames().size(), 1 );	// should have 1 name
+		assertEquals(realm2.getRealmNames().get(0).equalsIgnoreCase(realmName2), true );
+		
+		
+		// Create an account with a known realm name but no known addr
+		String hostname3 = "host4444_3";
+		Host host3 = caseDB.getHostManager().newHost(hostname3);
+		
+		String realmName3 = "realm4444_3";
+		String loginName3 = "User4444_3";
+		OsAccount osAccount3 = caseDB.getOsAccountManager().newWindowsOsAccount(null, loginName3, realmName3, host3, OsAccountRealm.RealmScope.DOMAIN);
+		
+		OsAccountRealm realm3 = caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount3.getRealmId());
+		assertEquals(realm3.getRealmAddr().orElse("").equalsIgnoreCase(""), true );
+		assertEquals(realm3.getRealmNames().size(), 1 );	// should have 1 name
+		assertEquals(realm3.getRealmNames().get(0).equalsIgnoreCase(realmName3), true );
+		
+		
+		// add a second user with the same realm name and a known addr - expect the realm to get updated
+		String loginName4 = "User4444_4";
+		String ownerSid4 = "S-1-5-21-111111111-444444444-4444444444-0001";
+		String realm4Addr = "S-1-5-21-111111111-444444444-4444444444";
+		
+		String hostname4 = "host4444_4";
+		Host host4 = caseDB.getHostManager().newHost(hostname4);
+		
+		OsAccount osAccount4 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerSid4, loginName4, realmName3, host4, OsAccountRealm.RealmScope.DOMAIN);
+		
+		// realm4 should be the same as realm3 but the addr should be updated now
+		OsAccountRealm realm4 = caseDB.getOsAccountRealmManager().getRealmByRealmId(osAccount4.getRealmId());
+		assertEquals(osAccount3.getRealmId(), osAccount4.getRealmId() );
+		assertEquals(realm4.getRealmAddr().orElse("").equalsIgnoreCase(realm4Addr), true );
+		assertEquals(realm4.getRealmNames().size(), 1 );	// should have 1 name
+		assertEquals(realm4.getRealmNames().get(0).equalsIgnoreCase(realmName3), true );
+		
+	
+	}
+	
+	
+	@Test
+	public void windowsAccountUpdateTests() throws TskCoreException, OsAccountManager.NotUserSIDException {
+
+		
+		String hostname1 = "host55555";
+		Host host1 = caseDB.getHostManager().newHost(hostname1);
+		
+	
+		// Test 1: create an account with a SID alone. Then update the loginName.
+		
+		String ownerUid1 = "S-1-5-21-111111111-222222222-555555555-0001";
+		OsAccount osAccount1 = caseDB.getOsAccountManager().newWindowsOsAccount(ownerUid1, null, null, host1, OsAccountRealm.RealmScope.DOMAIN);
+		
+		
+		// now update the account login name
+		String loginname1 = "jbravo";
+		
+		OsAccountUpdateResult updateResult = caseDB.getOsAccountManager().updateCoreWindowsOsAccountAttributes(osAccount1, null, loginname1, null, host1);
+		assertEquals(updateResult.getUpdateStatusCode(), OsAccountManager.OsAccountUpdateStatus.UPDATED);
+		assertEquals(updateResult.getUpdatedAccount().isPresent(), true);
+		OsAccount updatedAccount = updateResult.getUpdatedAccount().orElseThrow(() ->  new TskCoreException("Updated account not found."));
+		
+		// verify that account has both addr and loginName, and that signature is the addr
+		assertTrue(updatedAccount.getAddr().orElse("").equalsIgnoreCase(ownerUid1));
+		assertTrue(updatedAccount.getLoginName().orElse("").equalsIgnoreCase(loginname1));
+		assertTrue(updatedAccount.getSignature().equalsIgnoreCase(ownerUid1));	// account signature should not change
+		
+		
+		String realmAddr1 = "S-1-5-21-111111111-222222222-555555555";
+		String realmSignature1 = realmAddr1 + "_DOMAIN";	// for a domain realm - signature is sid/name + "_DOMAIN"
+		
+		OsAccountRealm realm1 = caseDB.getOsAccountRealmManager().getRealmByRealmId(updatedAccount.getRealmId());
+		assertTrue(realm1.getRealmAddr().orElse("").equalsIgnoreCase(realmAddr1));
+		assertTrue(realm1.getSignature().equalsIgnoreCase(realmSignature1));	
+		
+		
+		// TBD Test2: create an account with realmName/loginname and then update the SID
+		
+		String loginname2 = "janeB";
+		String realmName2 = "realm55555";
+		OsAccount osAccount2 = caseDB.getOsAccountManager().newWindowsOsAccount(null, loginname2, realmName2, host1, OsAccountRealm.RealmScope.DOMAIN);
+		
+		assertFalse(osAccount2.getAddr().isPresent());
+		assertTrue(osAccount2.getLoginName().orElse("").equalsIgnoreCase(loginname2));
+		assertTrue(osAccount2.getSignature().equalsIgnoreCase(loginname2));	// account signature should be the login name
+		
+		// now update the account SID
+		String ownerUid2 = "S-1-5-21-111111111-222222222-555555555-0007";
+		OsAccountUpdateResult updateResult2 = caseDB.getOsAccountManager().updateCoreWindowsOsAccountAttributes(osAccount2, ownerUid2, null, realmName2, host1);
+		assertEquals(updateResult2.getUpdateStatusCode(), OsAccountManager.OsAccountUpdateStatus.UPDATED);
+		assertEquals(updateResult2.getUpdatedAccount().isPresent(), true);
+		OsAccount updatedAccount2 = updateResult2.getUpdatedAccount().orElseThrow(() ->  new TskCoreException("Updated account not found."));
+		
+		// verify that account has both addr and loginName, and that signature is the addr
+		assertTrue(updatedAccount2.getAddr().orElse("").equalsIgnoreCase(ownerUid2));
+		assertTrue(updatedAccount2.getLoginName().orElse("").equalsIgnoreCase(loginname2));
+		assertTrue(updatedAccount2.getSignature().equalsIgnoreCase(ownerUid2));	// account signature should now be addr
+		
+		// RAMAN TODO: CT-4284
+//		OsAccountRealm realm2 = caseDB.getOsAccountRealmManager().getRealmByRealmId(updatedAccount2.getRealmId());
+//		assertTrue(realm2.getRealmAddr().orElse("").equalsIgnoreCase(realmAddr1));
+//		assertTrue(realm2.getSignature().equalsIgnoreCase(realmSignature1));	
+	}
+	
+	
+}
diff --git a/bindings/java/test/org/sleuthkit/datamodel/PublicTagName.java b/bindings/java/test/org/sleuthkit/datamodel/PublicTagName.java
index 9a777aedd702246d7aa92515389927681fe9932d..0161813db0b263f6c6cfd29c26bc63a298046ff7 100644
--- a/bindings/java/test/org/sleuthkit/datamodel/PublicTagName.java
+++ b/bindings/java/test/org/sleuthkit/datamodel/PublicTagName.java
@@ -7,6 +7,8 @@
  */
 public class PublicTagName extends TagName {
 
+	private static final long serialVersionUID = 1L;
+
 	public PublicTagName(long id, String displayName, String description, HTML_COLOR color, TskData.FileKnown knownStatus, long tagSetId, int rank) {
 		super(id, displayName, description, color, knownStatus, tagSetId, rank);
 	}
diff --git a/bindings/java/test/org/sleuthkit/datamodel/TimelineEventTypesTest.java b/bindings/java/test/org/sleuthkit/datamodel/TimelineEventTypesTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..4ae85e6b1d3eed21041682173965b1bfcc29f7a6
--- /dev/null
+++ b/bindings/java/test/org/sleuthkit/datamodel/TimelineEventTypesTest.java
@@ -0,0 +1,201 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.Collections;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.logging.Logger;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.junit.After;
+import org.junit.AfterClass;
+import static org.junit.Assert.assertEquals;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
+import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
+
+/**
+ *
+ * Tests to make sure timeline event types handle all artifacts with time-valued
+ * attributes.
+ *
+ */
+public class TimelineEventTypesTest {
+
+	private static final Logger LOGGER = Logger.getLogger(TimelineEventTypesTest.class.getName());
+
+	public TimelineEventTypesTest() {
+
+	}
+
+	@BeforeClass
+	public static void setUpClass() {
+	}
+
+	@AfterClass
+	public static void tearDownClass() {
+	}
+
+	@Before
+	public void setUp() {
+	}
+
+	@After
+	public void tearDown() {
+	}
+
+	/**
+	 * Ensure all event display names exist and are unique.
+	 */
+	@Test
+	public void testEventIdentifiersUnique() {
+		Set<String> identifiers = new HashSet<>();
+		Set<String> repeats = new HashSet<>();
+		Set<TimelineEventArtifactTypeImpl> nullDisplayNames = new HashSet<>();
+
+		getArtifactEvents().forEach((artEv) -> {
+			if (artEv.getDisplayName() == null) {
+				nullDisplayNames.add(artEv);
+			} else if (!identifiers.add(artEv.getDisplayName())) {
+				repeats.add(artEv.getDisplayName());
+			}
+		});
+
+		assertEquals("Expected no null display names", 0, nullDisplayNames.size());
+		assertEquals("Expected no repeats but received: " + repeats.stream().collect(Collectors.joining(", ")), 0, repeats.size());
+	}
+
+	/**
+	 * Ensure all artifacts with time-valued attributes are represented without
+	 * duplicates.
+	 */
+	@Test
+	public void testArtifactAttributeEvents() {
+		// this mapping was generated based on artifact_catalog.dox
+		Map<ARTIFACT_TYPE, Set<ATTRIBUTE_TYPE>> mapping = new HashMap<>();
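+		// Each entry maps an artifact type to the time-valued attributes it can carry; the assertions below check that timeline event types cover all of them exactly once.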
+		mapping.put(ARTIFACT_TYPE.TSK_PROG_NOTIFICATIONS, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_WEB_SEARCH_QUERY, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED));
+		mapping.put(ARTIFACT_TYPE.TSK_RECENT_OBJECT, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED));
+		mapping.put(ARTIFACT_TYPE.TSK_SCREEN_SHOTS, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_BLUETOOTH_ADAPTER, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_CALENDAR_ENTRY, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_END, ATTRIBUTE_TYPE.TSK_DATETIME_START));
+		mapping.put(ARTIFACT_TYPE.TSK_DEVICE_ATTACHED, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_SERVICE_ACCOUNT, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED));
+		mapping.put(ARTIFACT_TYPE.TSK_DELETED_PROG, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_GPS_LAST_KNOWN_LOCATION, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_USER_DEVICE_EVENT, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_END, ATTRIBUTE_TYPE.TSK_DATETIME_START));
+		mapping.put(ARTIFACT_TYPE.TSK_WEB_HISTORY, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, ATTRIBUTE_TYPE.TSK_DATETIME_CREATED));
+		mapping.put(ARTIFACT_TYPE.TSK_OS_INFO, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_GPS_ROUTE, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_MESSAGE, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_GPS_BOOKMARK, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_GPS_SEARCH, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, ATTRIBUTE_TYPE.TSK_DATETIME_CREATED));
+		mapping.put(ARTIFACT_TYPE.TSK_WEB_CACHE, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED));
+		mapping.put(ARTIFACT_TYPE.TSK_WIFI_NETWORK, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED));
+		mapping.put(ARTIFACT_TYPE.TSK_METADATA_EXIF, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED));
+		mapping.put(ARTIFACT_TYPE.TSK_WEB_COOKIE, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, ATTRIBUTE_TYPE.TSK_DATETIME_END));
+		mapping.put(ARTIFACT_TYPE.TSK_WEB_DOWNLOAD, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED));
+		mapping.put(ARTIFACT_TYPE.TSK_TL_EVENT, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_METADATA, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, ATTRIBUTE_TYPE.TSK_DATETIME_MODIFIED, ATTRIBUTE_TYPE.TSK_LAST_PRINTED_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_WEB_BOOKMARK, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_CREATED));
+		mapping.put(ARTIFACT_TYPE.TSK_BLUETOOTH_PAIRING, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME, ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED));
+		mapping.put(ARTIFACT_TYPE.TSK_INSTALLED_PROG, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+		mapping.put(ARTIFACT_TYPE.TSK_BACKUP_EVENT, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_END, ATTRIBUTE_TYPE.TSK_DATETIME_START));
+		mapping.put(ARTIFACT_TYPE.TSK_CALLLOG, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME_END, ATTRIBUTE_TYPE.TSK_DATETIME_START));
+		mapping.put(ARTIFACT_TYPE.TSK_PROG_RUN, EnumSet.of(ATTRIBUTE_TYPE.TSK_DATETIME));
+
+		Map<Integer, ARTIFACT_TYPE> artTypeIds = Stream.of(ARTIFACT_TYPE.values())
+				.collect(Collectors.toMap((art) -> art.getTypeID(), (art) -> art));
+
+		Map<Integer, ATTRIBUTE_TYPE> attrTypeIds = Stream.of(ATTRIBUTE_TYPE.values())
+				.collect(Collectors.toMap((attr) -> attr.getTypeID(), (attr) -> attr));
+
+		Map<ARTIFACT_TYPE, Set<ATTRIBUTE_TYPE>> duplicates = new HashMap<>();
+		Map<ARTIFACT_TYPE, Set<ATTRIBUTE_TYPE>> timelineEventArtifacts = new HashMap<>();
+
+		getArtifactEvents().forEach((artEv) -> {
+			ARTIFACT_TYPE currArtType = artTypeIds.get(artEv.getArtifactTypeID());
+			ATTRIBUTE_TYPE curAttrType = attrTypeIds.get(artEv.getDateTimeAttributeType().getTypeID());
+			if (currArtType != null && curAttrType != null) {
+				// if adding for this artifact's set of attributes results in duplicate
+				if (!timelineEventArtifacts.computeIfAbsent(currArtType, (artType) -> new HashSet<>()).add(curAttrType)
+						&& !currArtType.equals(ARTIFACT_TYPE.TSK_TL_EVENT)) {
+					duplicates.computeIfAbsent(currArtType, (artType) -> new HashSet<>()).add(curAttrType);
+				}
+			}
+		});
+
+		Map<ARTIFACT_TYPE, Set<ATTRIBUTE_TYPE>> notRepresentedInTimeline = new HashMap<>();
+		for (Entry<ARTIFACT_TYPE, Set<ATTRIBUTE_TYPE>> e : mapping.entrySet()) {
+			Set<ATTRIBUTE_TYPE> bbAttrs = new HashSet<>(e.getValue());
+			Set<ATTRIBUTE_TYPE> timelineEvtAttrs = timelineEventArtifacts.get(e.getKey());
+			timelineEvtAttrs = timelineEvtAttrs == null ? Collections.emptySet() : timelineEvtAttrs;
+
+			bbAttrs.removeAll(timelineEvtAttrs);
+			if (bbAttrs.size() > 0) {
+				notRepresentedInTimeline.put(e.getKey(), bbAttrs);
+			}
+		}
+
+		assertEquals("Expected all time valued attributes represented, but the following are not: "
+				+ notRepresentedInTimeline.toString(), 0, notRepresentedInTimeline.size());
+
+		assertEquals("Expected no repeats but received: " + duplicates.toString(), 0, duplicates.size());
+	}
+
+	/**
+	 * Recursively gathers all timeline event types for artifacts.
+	 *
+	 * @return Timeline event types for artifacts.
+	 */
+	private Stream<TimelineEventArtifactTypeImpl> getArtifactEvents() {
+		return getArtifactEvents(TimelineEventType.ROOT_EVENT_TYPE);
+	}
+
+	/**
+	 * Recursively gathers all timeline event types for artifacts.
+	 *
+	 * @param type The parent type that will be checked and whose children will
+	 *             be checked.
+	 *
+	 * @return Timeline event types for artifacts.
+	 */
+	private Stream<TimelineEventArtifactTypeImpl> getArtifactEvents(TimelineEventType type) {
+		Stream<TimelineEventArtifactTypeImpl> thisItem = type instanceof TimelineEventArtifactTypeImpl
+				? Stream.of((TimelineEventArtifactTypeImpl) type)
+				: Stream.empty();
+
+		Stream<TimelineEventArtifactTypeImpl> children = type.getChildren() == null
+				? Stream.empty()
+				: type.getChildren().stream()
+						.flatMap(t -> getArtifactEvents(t));
+
+		return Stream.concat(thisItem, children);
+	}
+}
diff --git a/case-uco/java/nbproject/project.properties b/case-uco/java/nbproject/project.properties
index 8655d8ef2601abd9cfb34126c14b9e66b30d4e5c..54f8132cb17d77668360f088672d856ae3a00ef9 100644
--- a/case-uco/java/nbproject/project.properties
+++ b/case-uco/java/nbproject/project.properties
@@ -35,14 +35,14 @@ dist.javadoc.dir=${dist.dir}/javadoc
 endorsed.classpath=
 excludes=
 file.reference.gson-2.8.5.jar=lib/gson-2.8.5.jar
-file.reference.sleuthkit-4.10.2.jar=lib/sleuthkit-4.10.2.jar
+file.reference.sleuthkit-4.11.0.jar=lib/sleuthkit-4.11.0.jar
 includes=**
 jar.archive.disabled=${jnlp.enabled}
 jar.compress=false
 jar.index=${jnlp.enabled}
 javac.classpath=\
     ${file.reference.gson-2.8.5.jar}:\
-${file.reference.sleuthkit-4.10.2.jar}
+${file.reference.sleuthkit-4.11.0.jar}
 # Space-separated list of extra javac options
 javac.compilerargs=-Xlint
 javac.deprecation=false
diff --git a/case-uco/java/src/org/sleuthkit/caseuco/CaseUcoExporter.java b/case-uco/java/src/org/sleuthkit/caseuco/CaseUcoExporter.java
index 706e33c3aa6afc68fbf50e6e75a640c95921ff00..dd21f2d494b887853d45cd6c0800bb4e3e0b70fb 100755
--- a/case-uco/java/src/org/sleuthkit/caseuco/CaseUcoExporter.java
+++ b/case-uco/java/src/org/sleuthkit/caseuco/CaseUcoExporter.java
@@ -414,6 +414,7 @@ public List<JsonElement> exportVolumeSystem(VolumeSystem volumeSystem) throws Ts
      * @throws BlackboardJsonAttrUtil.InvalidJsonException If a JSON valued
      * attribute could not be correctly deserialized.
      */
+    @SuppressWarnings( "deprecation" )
     public List<JsonElement> exportBlackboardArtifact(BlackboardArtifact artifact) throws TskCoreException,
             ContentNotExportableException, BlackboardJsonAttrUtil.InvalidJsonException {
         List<JsonElement> output = new ArrayList<>();
diff --git a/configure.ac b/configure.ac
index 8d7f27b9f957316f765b99cc87d1f8bc18709362..093a9a26fce98e54947f02826e58efdee36c7bc4 100644
--- a/configure.ac
+++ b/configure.ac
@@ -4,7 +4,7 @@ dnl Process this file with autoconf to produce a configure script.
 
 AC_PREREQ(2.59)
 
-AC_INIT(sleuthkit, 4.10.2)
+AC_INIT(sleuthkit, 4.11.0)
 m4_include([m4/ax_pthread.m4])
 dnl include the version from 1.12.1. This will work for
 m4_include([m4/cppunit.m4])
diff --git a/debian/changelog b/debian/changelog
index badf8e8c181f82668039c7d043b164284d038add..d58b9047b8e9944e308a07e50856ca5b570c3f4f 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,4 +1,4 @@
-sleuthkit-java (4.10.2-1) unstable; urgency=medium
+sleuthkit-java (4.11.0-1) unstable; urgency=medium
 
   * Initial release (Closes: #nnnn)  <nnnn is the bug number of your ITP>
 
diff --git a/debian/sleuthkit-java.install b/debian/sleuthkit-java.install
index adc6df30e958cdd856d82a08900db246d117b85e..1a26f0ab860346c33c0d5501e5453b5f1b9649c0 100644
--- a/debian/sleuthkit-java.install
+++ b/debian/sleuthkit-java.install
@@ -1,4 +1,4 @@
 bindings/java/lib/sqlite-jdbc-3.25.2.jar /usr/share/java
-bindings/java/dist/sleuthkit-4.10.2.jar /usr/share/java
-case-uco/java/dist/sleuthkit-caseuco-4.10.2.jar /usr/share/java
+bindings/java/dist/sleuthkit-4.11.0.jar /usr/share/java
+case-uco/java/dist/sleuthkit-caseuco-4.11.0.jar /usr/share/java
 
diff --git a/licenses/GNU-COPYING b/licenses/GNUv2-COPYING
similarity index 100%
rename from licenses/GNU-COPYING
rename to licenses/GNUv2-COPYING
diff --git a/licenses/GNUv3-COPYING b/licenses/GNUv3-COPYING
new file mode 100644
index 0000000000000000000000000000000000000000..f288702d2fa16d3cdf0035b15a9fcbc552cd88e7
--- /dev/null
+++ b/licenses/GNUv3-COPYING
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.  Therefore, you have
+certain responsibilities if you distribute copies of the software, or if
+you modify it: responsibilities to respect the freedom of others.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must pass on to the recipients the same
+freedoms that you received.  You must make sure that they, too, receive
+or can get the source code.  And you must show them these terms so they
+know their rights.
+
+  Developers that use the GNU GPL protect your rights with two steps:
+(1) assert copyright on the software, and (2) offer you this License
+giving you legal permission to copy, distribute and/or modify it.
+
+  For the developers' and authors' protection, the GPL clearly explains
+that there is no warranty for this free software.  For both users' and
+authors' sake, the GPL requires that modified versions be marked as
+changed, so that their problems will not be attributed erroneously to
+authors of previous versions.
+
+  Some devices are designed to deny users access to install or run
+modified versions of the software inside them, although the manufacturer
+can do so.  This is fundamentally incompatible with the aim of
+protecting users' freedom to change the software.  The systematic
+pattern of such abuse occurs in the area of products for individuals to
+use, which is precisely where it is most unacceptable.  Therefore, we
+have designed this version of the GPL to prohibit the practice for those
+products.  If such problems arise substantially in other domains, we
+stand ready to extend this provision to those domains in future versions
+of the GPL, as needed to protect the freedom of users.
+
+  Finally, every program is threatened constantly by software patents.
+States should not allow patents to restrict development and use of
+software on general-purpose computers, but in those that do, we wish to
+avoid the special danger that patents applied to a free program could
+make it effectively proprietary.  To prevent this, the GPL assures that
+patents cannot be used to render the program non-free.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+                       TERMS AND CONDITIONS
+
+  0. Definitions.
+
+  "This License" refers to version 3 of the GNU General Public License.
+
+  "Copyright" also means copyright-like laws that apply to other kinds of
+works, such as semiconductor masks.
+
+  "The Program" refers to any copyrightable work licensed under this
+License.  Each licensee is addressed as "you".  "Licensees" and
+"recipients" may be individuals or organizations.
+
+  To "modify" a work means to copy from or adapt all or part of the work
+in a fashion requiring copyright permission, other than the making of an
+exact copy.  The resulting work is called a "modified version" of the
+earlier work or a work "based on" the earlier work.
+
+  A "covered work" means either the unmodified Program or a work based
+on the Program.
+
+  To "propagate" a work means to do anything with it that, without
+permission, would make you directly or secondarily liable for
+infringement under applicable copyright law, except executing it on a
+computer or modifying a private copy.  Propagation includes copying,
+distribution (with or without modification), making available to the
+public, and in some countries other activities as well.
+
+  To "convey" a work means any kind of propagation that enables other
+parties to make or receive copies.  Mere interaction with a user through
+a computer network, with no transfer of a copy, is not conveying.
+
+  An interactive user interface displays "Appropriate Legal Notices"
+to the extent that it includes a convenient and prominently visible
+feature that (1) displays an appropriate copyright notice, and (2)
+tells the user that there is no warranty for the work (except to the
+extent that warranties are provided), that licensees may convey the
+work under this License, and how to view a copy of this License.  If
+the interface presents a list of user commands or options, such as a
+menu, a prominent item in the list meets this criterion.
+
+  1. Source Code.
+
+  The "source code" for a work means the preferred form of the work
+for making modifications to it.  "Object code" means any non-source
+form of a work.
+
+  A "Standard Interface" means an interface that either is an official
+standard defined by a recognized standards body, or, in the case of
+interfaces specified for a particular programming language, one that
+is widely used among developers working in that language.
+
+  The "System Libraries" of an executable work include anything, other
+than the work as a whole, that (a) is included in the normal form of
+packaging a Major Component, but which is not part of that Major
+Component, and (b) serves only to enable use of the work with that
+Major Component, or to implement a Standard Interface for which an
+implementation is available to the public in source code form.  A
+"Major Component", in this context, means a major essential component
+(kernel, window system, and so on) of the specific operating system
+(if any) on which the executable work runs, or a compiler used to
+produce the work, or an object code interpreter used to run it.
+
+  The "Corresponding Source" for a work in object code form means all
+the source code needed to generate, install, and (for an executable
+work) run the object code and to modify the work, including scripts to
+control those activities.  However, it does not include the work's
+System Libraries, or general-purpose tools or generally available free
+programs which are used unmodified in performing those activities but
+which are not part of the work.  For example, Corresponding Source
+includes interface definition files associated with source files for
+the work, and the source code for shared libraries and dynamically
+linked subprograms that the work is specifically designed to require,
+such as by intimate data communication or control flow between those
+subprograms and other parts of the work.
+
+  The Corresponding Source need not include anything that users
+can regenerate automatically from other parts of the Corresponding
+Source.
+
+  The Corresponding Source for a work in source code form is that
+same work.
+
+  2. Basic Permissions.
+
+  All rights granted under this License are granted for the term of
+copyright on the Program, and are irrevocable provided the stated
+conditions are met.  This License explicitly affirms your unlimited
+permission to run the unmodified Program.  The output from running a
+covered work is covered by this License only if the output, given its
+content, constitutes a covered work.  This License acknowledges your
+rights of fair use or other equivalent, as provided by copyright law.
+
+  You may make, run and propagate covered works that you do not
+convey, without conditions so long as your license otherwise remains
+in force.  You may convey covered works to others for the sole purpose
+of having them make modifications exclusively for you, or provide you
+with facilities for running those works, provided that you comply with
+the terms of this License in conveying all material for which you do
+not control copyright.  Those thus making or running the covered works
+for you must do so exclusively on your behalf, under your direction
+and control, on terms that prohibit them from making any copies of
+your copyrighted material outside their relationship with you.
+
+  Conveying under any other circumstances is permitted solely under
+the conditions stated below.  Sublicensing is not allowed; section 10
+makes it unnecessary.
+
+  3. Protecting Users' Legal Rights From Anti-Circumvention Law.
+
+  No covered work shall be deemed part of an effective technological
+measure under any applicable law fulfilling obligations under article
+11 of the WIPO copyright treaty adopted on 20 December 1996, or
+similar laws prohibiting or restricting circumvention of such
+measures.
+
+  When you convey a covered work, you waive any legal power to forbid
+circumvention of technological measures to the extent such circumvention
+is effected by exercising rights under this License with respect to
+the covered work, and you disclaim any intention to limit operation or
+modification of the work as a means of enforcing, against the work's
+users, your or third parties' legal rights to forbid circumvention of
+technological measures.
+
+  4. Conveying Verbatim Copies.
+
+  You may convey verbatim copies of the Program's source code as you
+receive it, in any medium, provided that you conspicuously and
+appropriately publish on each copy an appropriate copyright notice;
+keep intact all notices stating that this License and any
+non-permissive terms added in accord with section 7 apply to the code;
+keep intact all notices of the absence of any warranty; and give all
+recipients a copy of this License along with the Program.
+
+  You may charge any price or no price for each copy that you convey,
+and you may offer support or warranty protection for a fee.
+
+  5. Conveying Modified Source Versions.
+
+  You may convey a work based on the Program, or the modifications to
+produce it from the Program, in the form of source code under the
+terms of section 4, provided that you also meet all of these conditions:
+
+    a) The work must carry prominent notices stating that you modified
+    it, and giving a relevant date.
+
+    b) The work must carry prominent notices stating that it is
+    released under this License and any conditions added under section
+    7.  This requirement modifies the requirement in section 4 to
+    "keep intact all notices".
+
+    c) You must license the entire work, as a whole, under this
+    License to anyone who comes into possession of a copy.  This
+    License will therefore apply, along with any applicable section 7
+    additional terms, to the whole of the work, and all its parts,
+    regardless of how they are packaged.  This License gives no
+    permission to license the work in any other way, but it does not
+    invalidate such permission if you have separately received it.
+
+    d) If the work has interactive user interfaces, each must display
+    Appropriate Legal Notices; however, if the Program has interactive
+    interfaces that do not display Appropriate Legal Notices, your
+    work need not make them do so.
+
+  A compilation of a covered work with other separate and independent
+works, which are not by their nature extensions of the covered work,
+and which are not combined with it such as to form a larger program,
+in or on a volume of a storage or distribution medium, is called an
+"aggregate" if the compilation and its resulting copyright are not
+used to limit the access or legal rights of the compilation's users
+beyond what the individual works permit.  Inclusion of a covered work
+in an aggregate does not cause this License to apply to the other
+parts of the aggregate.
+
+  6. Conveying Non-Source Forms.
+
+  You may convey a covered work in object code form under the terms
+of sections 4 and 5, provided that you also convey the
+machine-readable Corresponding Source under the terms of this License,
+in one of these ways:
+
+    a) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by the
+    Corresponding Source fixed on a durable physical medium
+    customarily used for software interchange.
+
+    b) Convey the object code in, or embodied in, a physical product
+    (including a physical distribution medium), accompanied by a
+    written offer, valid for at least three years and valid for as
+    long as you offer spare parts or customer support for that product
+    model, to give anyone who possesses the object code either (1) a
+    copy of the Corresponding Source for all the software in the
+    product that is covered by this License, on a durable physical
+    medium customarily used for software interchange, for a price no
+    more than your reasonable cost of physically performing this
+    conveying of source, or (2) access to copy the
+    Corresponding Source from a network server at no charge.
+
+    c) Convey individual copies of the object code with a copy of the
+    written offer to provide the Corresponding Source.  This
+    alternative is allowed only occasionally and noncommercially, and
+    only if you received the object code with such an offer, in accord
+    with subsection 6b.
+
+    d) Convey the object code by offering access from a designated
+    place (gratis or for a charge), and offer equivalent access to the
+    Corresponding Source in the same way through the same place at no
+    further charge.  You need not require recipients to copy the
+    Corresponding Source along with the object code.  If the place to
+    copy the object code is a network server, the Corresponding Source
+    may be on a different server (operated by you or a third party)
+    that supports equivalent copying facilities, provided you maintain
+    clear directions next to the object code saying where to find the
+    Corresponding Source.  Regardless of what server hosts the
+    Corresponding Source, you remain obligated to ensure that it is
+    available for as long as needed to satisfy these requirements.
+
+    e) Convey the object code using peer-to-peer transmission, provided
+    you inform other peers where the object code and Corresponding
+    Source of the work are being offered to the general public at no
+    charge under subsection 6d.
+
+  A separable portion of the object code, whose source code is excluded
+from the Corresponding Source as a System Library, need not be
+included in conveying the object code work.
+
+  A "User Product" is either (1) a "consumer product", which means any
+tangible personal property which is normally used for personal, family,
+or household purposes, or (2) anything designed or sold for incorporation
+into a dwelling.  In determining whether a product is a consumer product,
+doubtful cases shall be resolved in favor of coverage.  For a particular
+product received by a particular user, "normally used" refers to a
+typical or common use of that class of product, regardless of the status
+of the particular user or of the way in which the particular user
+actually uses, or expects or is expected to use, the product.  A product
+is a consumer product regardless of whether the product has substantial
+commercial, industrial or non-consumer uses, unless such uses represent
+the only significant mode of use of the product.
+
+  "Installation Information" for a User Product means any methods,
+procedures, authorization keys, or other information required to install
+and execute modified versions of a covered work in that User Product from
+a modified version of its Corresponding Source.  The information must
+suffice to ensure that the continued functioning of the modified object
+code is in no case prevented or interfered with solely because
+modification has been made.
+
+  If you convey an object code work under this section in, or with, or
+specifically for use in, a User Product, and the conveying occurs as
+part of a transaction in which the right of possession and use of the
+User Product is transferred to the recipient in perpetuity or for a
+fixed term (regardless of how the transaction is characterized), the
+Corresponding Source conveyed under this section must be accompanied
+by the Installation Information.  But this requirement does not apply
+if neither you nor any third party retains the ability to install
+modified object code on the User Product (for example, the work has
+been installed in ROM).
+
+  The requirement to provide Installation Information does not include a
+requirement to continue to provide support service, warranty, or updates
+for a work that has been modified or installed by the recipient, or for
+the User Product in which it has been modified or installed.  Access to a
+network may be denied when the modification itself materially and
+adversely affects the operation of the network or violates the rules and
+protocols for communication across the network.
+
+  Corresponding Source conveyed, and Installation Information provided,
+in accord with this section must be in a format that is publicly
+documented (and with an implementation available to the public in
+source code form), and must require no special password or key for
+unpacking, reading or copying.
+
+  7. Additional Terms.
+
+  "Additional permissions" are terms that supplement the terms of this
+License by making exceptions from one or more of its conditions.
+Additional permissions that are applicable to the entire Program shall
+be treated as though they were included in this License, to the extent
+that they are valid under applicable law.  If additional permissions
+apply only to part of the Program, that part may be used separately
+under those permissions, but the entire Program remains governed by
+this License without regard to the additional permissions.
+
+  When you convey a copy of a covered work, you may at your option
+remove any additional permissions from that copy, or from any part of
+it.  (Additional permissions may be written to require their own
+removal in certain cases when you modify the work.)  You may place
+additional permissions on material, added by you to a covered work,
+for which you have or can give appropriate copyright permission.
+
+  Notwithstanding any other provision of this License, for material you
+add to a covered work, you may (if authorized by the copyright holders of
+that material) supplement the terms of this License with terms:
+
+    a) Disclaiming warranty or limiting liability differently from the
+    terms of sections 15 and 16 of this License; or
+
+    b) Requiring preservation of specified reasonable legal notices or
+    author attributions in that material or in the Appropriate Legal
+    Notices displayed by works containing it; or
+
+    c) Prohibiting misrepresentation of the origin of that material, or
+    requiring that modified versions of such material be marked in
+    reasonable ways as different from the original version; or
+
+    d) Limiting the use for publicity purposes of names of licensors or
+    authors of the material; or
+
+    e) Declining to grant rights under trademark law for use of some
+    trade names, trademarks, or service marks; or
+
+    f) Requiring indemnification of licensors and authors of that
+    material by anyone who conveys the material (or modified versions of
+    it) with contractual assumptions of liability to the recipient, for
+    any liability that these contractual assumptions directly impose on
+    those licensors and authors.
+
+  All other non-permissive additional terms are considered "further
+restrictions" within the meaning of section 10.  If the Program as you
+received it, or any part of it, contains a notice stating that it is
+governed by this License along with a term that is a further
+restriction, you may remove that term.  If a license document contains
+a further restriction but permits relicensing or conveying under this
+License, you may add to a covered work material governed by the terms
+of that license document, provided that the further restriction does
+not survive such relicensing or conveying.
+
+  If you add terms to a covered work in accord with this section, you
+must place, in the relevant source files, a statement of the
+additional terms that apply to those files, or a notice indicating
+where to find the applicable terms.
+
+  Additional terms, permissive or non-permissive, may be stated in the
+form of a separately written license, or stated as exceptions;
+the above requirements apply either way.
+
+  8. Termination.
+
+  You may not propagate or modify a covered work except as expressly
+provided under this License.  Any attempt otherwise to propagate or
+modify it is void, and will automatically terminate your rights under
+this License (including any patent licenses granted under the third
+paragraph of section 11).
+
+  However, if you cease all violation of this License, then your
+license from a particular copyright holder is reinstated (a)
+provisionally, unless and until the copyright holder explicitly and
+finally terminates your license, and (b) permanently, if the copyright
+holder fails to notify you of the violation by some reasonable means
+prior to 60 days after the cessation.
+
+  Moreover, your license from a particular copyright holder is
+reinstated permanently if the copyright holder notifies you of the
+violation by some reasonable means, this is the first time you have
+received notice of violation of this License (for any work) from that
+copyright holder, and you cure the violation prior to 30 days after
+your receipt of the notice.
+
+  Termination of your rights under this section does not terminate the
+licenses of parties who have received copies or rights from you under
+this License.  If your rights have been terminated and not permanently
+reinstated, you do not qualify to receive new licenses for the same
+material under section 10.
+
+  9. Acceptance Not Required for Having Copies.
+
+  You are not required to accept this License in order to receive or
+run a copy of the Program.  Ancillary propagation of a covered work
+occurring solely as a consequence of using peer-to-peer transmission
+to receive a copy likewise does not require acceptance.  However,
+nothing other than this License grants you permission to propagate or
+modify any covered work.  These actions infringe copyright if you do
+not accept this License.  Therefore, by modifying or propagating a
+covered work, you indicate your acceptance of this License to do so.
+
+  10. Automatic Licensing of Downstream Recipients.
+
+  Each time you convey a covered work, the recipient automatically
+receives a license from the original licensors, to run, modify and
+propagate that work, subject to this License.  You are not responsible
+for enforcing compliance by third parties with this License.
+
+  An "entity transaction" is a transaction transferring control of an
+organization, or substantially all assets of one, or subdividing an
+organization, or merging organizations.  If propagation of a covered
+work results from an entity transaction, each party to that
+transaction who receives a copy of the work also receives whatever
+licenses to the work the party's predecessor in interest had or could
+give under the previous paragraph, plus a right to possession of the
+Corresponding Source of the work from the predecessor in interest, if
+the predecessor has it or can get it with reasonable efforts.
+
+  You may not impose any further restrictions on the exercise of the
+rights granted or affirmed under this License.  For example, you may
+not impose a license fee, royalty, or other charge for exercise of
+rights granted under this License, and you may not initiate litigation
+(including a cross-claim or counterclaim in a lawsuit) alleging that
+any patent claim is infringed by making, using, selling, offering for
+sale, or importing the Program or any portion of it.
+
+  11. Patents.
+
+  A "contributor" is a copyright holder who authorizes use under this
+License of the Program or a work on which the Program is based.  The
+work thus licensed is called the contributor's "contributor version".
+
+  A contributor's "essential patent claims" are all patent claims
+owned or controlled by the contributor, whether already acquired or
+hereafter acquired, that would be infringed by some manner, permitted
+by this License, of making, using, or selling its contributor version,
+but do not include claims that would be infringed only as a
+consequence of further modification of the contributor version.  For
+purposes of this definition, "control" includes the right to grant
+patent sublicenses in a manner consistent with the requirements of
+this License.
+
+  Each contributor grants you a non-exclusive, worldwide, royalty-free
+patent license under the contributor's essential patent claims, to
+make, use, sell, offer for sale, import and otherwise run, modify and
+propagate the contents of its contributor version.
+
+  In the following three paragraphs, a "patent license" is any express
+agreement or commitment, however denominated, not to enforce a patent
+(such as an express permission to practice a patent or covenant not to
+sue for patent infringement).  To "grant" such a patent license to a
+party means to make such an agreement or commitment not to enforce a
+patent against the party.
+
+  If you convey a covered work, knowingly relying on a patent license,
+and the Corresponding Source of the work is not available for anyone
+to copy, free of charge and under the terms of this License, through a
+publicly available network server or other readily accessible means,
+then you must either (1) cause the Corresponding Source to be so
+available, or (2) arrange to deprive yourself of the benefit of the
+patent license for this particular work, or (3) arrange, in a manner
+consistent with the requirements of this License, to extend the patent
+license to downstream recipients.  "Knowingly relying" means you have
+actual knowledge that, but for the patent license, your conveying the
+covered work in a country, or your recipient's use of the covered work
+in a country, would infringe one or more identifiable patents in that
+country that you have reason to believe are valid.
+
+  If, pursuant to or in connection with a single transaction or
+arrangement, you convey, or propagate by procuring conveyance of, a
+covered work, and grant a patent license to some of the parties
+receiving the covered work authorizing them to use, propagate, modify
+or convey a specific copy of the covered work, then the patent license
+you grant is automatically extended to all recipients of the covered
+work and works based on it.
+
+  A patent license is "discriminatory" if it does not include within
+the scope of its coverage, prohibits the exercise of, or is
+conditioned on the non-exercise of one or more of the rights that are
+specifically granted under this License.  You may not convey a covered
+work if you are a party to an arrangement with a third party that is
+in the business of distributing software, under which you make payment
+to the third party based on the extent of your activity of conveying
+the work, and under which the third party grants, to any of the
+parties who would receive the covered work from you, a discriminatory
+patent license (a) in connection with copies of the covered work
+conveyed by you (or copies made from those copies), or (b) primarily
+for and in connection with specific products or compilations that
+contain the covered work, unless you entered into that arrangement,
+or that patent license was granted, prior to 28 March 2007.
+
+  Nothing in this License shall be construed as excluding or limiting
+any implied license or other defenses to infringement that may
+otherwise be available to you under applicable patent law.
+
+  12. No Surrender of Others' Freedom.
+
+  If conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot convey a
+covered work so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you may
+not convey it at all.  For example, if you agree to terms that obligate you
+to collect a royalty for further conveying from those to whom you convey
+the Program, the only way you could satisfy both those terms and this
+License would be to refrain entirely from conveying the Program.
+
+  13. Use with the GNU Affero General Public License.
+
+  Notwithstanding any other provision of this License, you have
+permission to link or combine any covered work with a work licensed
+under version 3 of the GNU Affero General Public License into a single
+combined work, and to convey the resulting work.  The terms of this
+License will continue to apply to the part which is the covered work,
+but the special requirements of the GNU Affero General Public License,
+section 13, concerning interaction through a network will apply to the
+combination as such.
+
+  14. Revised Versions of this License.
+
+  The Free Software Foundation may publish revised and/or new versions of
+the GNU General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+  Each version is given a distinguishing version number.  If the
+Program specifies that a certain numbered version of the GNU General
+Public License "or any later version" applies to it, you have the
+option of following the terms and conditions either of that numbered
+version or of any later version published by the Free Software
+Foundation.  If the Program does not specify a version number of the
+GNU General Public License, you may choose any version ever published
+by the Free Software Foundation.
+
+  If the Program specifies that a proxy can decide which future
+versions of the GNU General Public License can be used, that proxy's
+public statement of acceptance of a version permanently authorizes you
+to choose that version for the Program.
+
+  Later license versions may give you additional or different
+permissions.  However, no additional obligations are imposed on any
+author or copyright holder as a result of your choosing to follow a
+later version.
+
+  15. Disclaimer of Warranty.
+
+  THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
+APPLICABLE LAW.  EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
+HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
+OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
+THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE.  THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
+IS WITH YOU.  SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
+ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+  16. Limitation of Liability.
+
+  IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
+THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
+GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
+USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
+DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
+PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
+EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
+SUCH DAMAGES.
+
+  17. Interpretation of Sections 15 and 16.
+
+  If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/licenses/README.md b/licenses/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..8d78a6849cc2510659ca4a2b5f975540a6105ab1
--- /dev/null
+++ b/licenses/README.md
@@ -0,0 +1,274 @@
+## Copyright/Non-Copyright Statements
+
+The SleuthKit (tsk/) originates from The Coroner's Toolkit (TCT),
+which was largely authored by Wietse Venema in 1997-1999 under the IBM Public
+License version 1.0:
+
+```
+Copyright (c) 1997,1998,1999, International Business Machines          
+Corporation and others. All Rights Reserved.
+```
+
+The TCT code was adopted into TASK in 2002-2003 by Brian Carrier under the Common
+Public License version 1.0:
+
+```
+Copyright (c) 2002-2003 Brian Carrier, @stake Inc.  All rights reserved
+```
+
+Brian Carrier then adopted the TASK code into the SleuthKit in 2003 under the
+same license:
+
+```
+Copyright (c) 2003-2005 Brian Carrier.  All rights reserved
+Copyright (c) 2006-2011 Brian Carrier, Basis Technology.  All Rights reserved
+```
+
+## Other materials
+
+* Makefile and Makefile.in
+
+```
+Copyright (C) 1994-2020 Free Software Foundation, Inc.
+
+This Makefile.in is free software; the Free Software Foundation
+gives unlimited permission to copy and/or distribute it,
+with or without modifications, as long as this notice is preserved.
+```
+
+* bindings/java/jni/*
+
+```
+Copyright (c) 2020 Brian Carrier.  All Rights reserved
+```
+
+under Common Public License version 1.0
+
+* bindings/java/src/org/sleuthkit/datamodel/*
+
+```
+Copyright 2011-2021 Basis Technology Corp.
+```
+
+under Apache License, Version 2.0
+
+* case-uco/*
+
+```
+Copyright 2020 Basis Technology Corp.
+```
+
+under Apache License, Version 2.0
+
+* debian/*
+
+```
+Copyright: 2001-2016 Brian Carrier <carrier@sleuthkit.org>
+           2002      @stake Inc.
+           2005      Naysawn Naderi <ndn@xiphos.ca>
+           2006-2011 Joachim Metz <jbmetz@users.sourceforge.net>
+           2006-2014 Basis Technology <carrier@sleuthkit.org>
+```
+
+under Common Public License version 1.0
+
+* tsk/auto/guid.cpp
+
+```
+Copyright (c) 2014 Graeme Hill (http://graemehill.ca)
+```
+
+under MIT license
+
+* m4/ax_check_openssl.m4
+
+```
+Copyright (c) 2009,2010 Zmanda Inc. <http://www.zmanda.com/>
+Copyright (c) 2009,2010 Dustin J. Mitchell <dustin@zmanda.com>
+
+Copying and distribution of this file, with or without modification, are
+permitted in any medium without royalty provided the copyright notice and this
+notice are preserved. This file is offered as-is, without any warranty
+```
+
+* m4/ax_jni_include_dir.m4
+
+```
+Copyright (c) 2008 Don Anderson <dda@sleepycat.com>
+
+Copying and distribution of this file, with or without modification, are
+permitted in any medium without royalty provided the copyright notice and this
+notice are preserved. This file is offered as-is, without any warranty
+```
+
+* m4/ax_cxx_compile_stdcxx.m4
+
+```
+Copyright (c) 2008 Benjamin Kosnik <bkoz@redhat.com>
+Copyright (c) 2012 Zack Weinberg <zackw@panix.com>
+Copyright (c) 2013 Roy Stogner <roystgnr@ices.utexas.edu>
+Copyright (c) 2014, 2015 Google Inc.; contributed by Alexey Sokolov <sokolov@google.com>
+Copyright (c) 2015 Paul Norman <penorman@mac.com>
+Copyright (c) 2015 Moritz Klammler <moritz@klammler.eu>
+Copyright (c) 2016, 2018 Krzesimir Nowak <qdlacz@gmail.com>
+Copyright (c) 2019 Enji Cooper <yaneurabeya@gmail.com>
+
+Copying and distribution of this file, with or without modification, are
+permitted in any medium without royalty provided the copyright notice and this
+notice are preserved. This file is offered as-is, without any warranty
+```
+
+* m4/ax_pthread.m4
+
+```
+Copyright (c) 2008 Steven G. Johnson <stevenj@alum.mit.edu>
+```
+
+under GNU General Public License version 3 or later
+
+* rejistry++/*
+
+```
+Copyright 2013-2015 Basis Technology Corp.
+```
+
+under Apache License, Version 2.0
+
+* samples/*
+
+```
+Copyright (c) 2008-2011  Brian Carrier <carrier <at> sleuthkit <dot> org>
+```
+
+under 3-Clause BSD license
+
+* tests/*
+
+```
+Copyright (c) 2008-2011 Brian Carrier.  All Rights reserved
+```
+
+under Common Public License version 1.0
+
+* tsk/auto/sqlite3.c and tsk/auto/sqlite3.h
+
+```
+2001 September 15
+
+The author disclaims copyright to this source code.  In place of
+a legal notice, here is a blessing:
+
+    May you do good and not evil.
+    May you find forgiveness for yourself and forgive others.
+    May you share freely, never taking more than you give.
+```
+
+* tsk/base/crc.c and tsk/base/crc.h
+
+```
+Copyright (C) Ross Williams, 1993. However, permission is
+granted to make and distribute verbatim copies of this
+document provided that this information block and copyright
+notice is included. Also, the C code modules included
+in this document are fully public domain.
+```
+
+* tsk/base/md5c.c
+
+```
+Copyright (C) 1991-2, RSA Data Security, Inc. Created 1991. All rights reserved.
+
+License to copy and use this software is granted provided that it
+is identified as the "RSA Data Security, Inc. MD5 Message-Digest
+Algorithm" in all material mentioning or referencing this software
+or this function.
+
+License is also granted to make and use derivative works provided
+that such works are identified as "derived from the RSA Data
+Security, Inc. MD5 Message-Digest Algorithm" in all material
+mentioning or referencing the derived work.
+
+RSA Data Security, Inc. makes no representations concerning either
+the merchantability of this software or the suitability of this
+software for any particular purpose. It is provided "as is"
+without express or implied warranty of any kind.
+
+These notices must be retained in any copies of any part of this
+documentation and/or software.
+```
+
+* tsk/base/sha1c.c
+
+```
+This version written November 2000 by David Ireland of 
+DI Management Services Pty Limited <code@di-mgt.com.au>
+
+Adapted from code in the Python Cryptography Toolkit, 
+version 1.0.0 by A.M. Kuchling 1995.
+```
+
+as public domain
+
+* tsk/base/tsk_unicode.c and tsk/base/tsk_unicode.h
+
+```
+Copyright 2001-2004 Unicode, Inc.
+
+Unicode, Inc. hereby grants the right to freely use the information
+supplied in this file in the creation of products supporting the
+Unicode Standard, and to make copies of this file in any form
+for internal or external distribution as long as this notice
+remains attached.
+```
+
+* tsk/base/XGetopt.c
+
+```
+2002-2003 Hans Dietrich <hdietrich2@hotmail.com>
+
+This software is released into the public domain.
+You are free to use it in any way you like.
+```
+
+* tsk/fs/lzvn.c
+
+```
+Copyright (c) 2015-2016, Apple Inc. All rights reserved.
+```
+
+under 3-Clause BSD license
+
+* tools/srchtools/srch_strings.c
+
+```
+Copyright 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
+```
+
+under GNU General Public License version 2
+
+* tools/fiwalk/*
+
+```
+2008-2013 Simson L. Garfinkel
+```
+
+as public domain
+
+* tools/fiwalk/plugins/*
+
+```
+James Migletz and Simson Garfinkel
+```
+
+as public domain
+
+* tools/fiwalk/src/base64.cpp and tools/fiwalk/src/base64.h
+
+```
+Copyright (C) 1996-1999 by Internet Software Consortium
+```
+
+Copyright by the Internet Software Consortium, with portions Copyright 1995
+by IBM. Both are licensed under a liberal copyright that allows inclusion
+in any program so long as the copyright notice is not removed.
+
diff --git a/packages/sleuthkit.spec b/packages/sleuthkit.spec
index 72ec66465ee65f71c4f446447b1d6679c36b1d56..7c18c0e2ac5a916ebb72a01421c0e189ecf3c9c8 100644
--- a/packages/sleuthkit.spec
+++ b/packages/sleuthkit.spec
@@ -1,5 +1,5 @@
 Name:		sleuthkit	
-Version:	4.10.2
+Version:	4.11.0
 Release:	1%{?dist}
 Summary:	The Sleuth Kit (TSK) is a library and collection of command line tools that allow you to investigate volume and file system data.	
 
diff --git a/tools/autotools/Makefile.am b/tools/autotools/Makefile.am
index 8058968f8be96ddcd593b01bb6dfea43b11840b6..4e686160a6710e41a72d255c81e8fcff19208219 100644
--- a/tools/autotools/Makefile.am
+++ b/tools/autotools/Makefile.am
@@ -4,11 +4,12 @@ LDADD = ../../tsk/libtsk.la
 LDFLAGS += -static
 EXTRA_DIST = .indent.pro
 
-bin_PROGRAMS = tsk_recover tsk_loaddb tsk_comparedir tsk_gettimes
+bin_PROGRAMS = tsk_recover tsk_loaddb tsk_comparedir tsk_gettimes tsk_imageinfo
 tsk_recover_SOURCES = tsk_recover.cpp 
 tsk_loaddb_SOURCES = tsk_loaddb.cpp 
 tsk_gettimes_SOURCES = tsk_gettimes.cpp 
 tsk_comparedir_SOURCES = tsk_comparedir.cpp tsk_comparedir.h
+tsk_imageinfo_SOURCES = tsk_imageinfo.cpp 
 
 indent:
 	indent *.cpp
diff --git a/tools/autotools/tsk_comparedir.cpp b/tools/autotools/tsk_comparedir.cpp
index db771d88e4c3cfc0aeda4641375fa1ca9bb35140..64be43c05031a460d41af2846b3fe47a42b06d52 100644
--- a/tools/autotools/tsk_comparedir.cpp
+++ b/tools/autotools/tsk_comparedir.cpp
@@ -348,7 +348,6 @@ main(int argc, char **argv1)
 
     TSK_POOL_TYPE_ENUM pooltype = TSK_POOL_TYPE_DETECT;
     TSK_OFF_T pvol_block = 0;
-    const char * password = "";
     
 #ifdef WIN32
     argv = CommandLineToArgvW(GetCommandLineW(), &argc);
diff --git a/tools/autotools/tsk_imageinfo.cpp b/tools/autotools/tsk_imageinfo.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..3bb63459f6637a7b03702ae2825877c67d66b6fb
--- /dev/null
+++ b/tools/autotools/tsk_imageinfo.cpp
@@ -0,0 +1,131 @@
+/*
+ ** tsk_imageinfo
+ ** The Sleuth Kit 
+ **
+ ** Brian Carrier [carrier <at> sleuthkit [dot] org]
+ ** Copyright (c) 2021 Brian Carrier.  All Rights reserved
+ **
+ ** This software is distributed under the Common Public License 1.0
+ **
+ */
+
+#include "tsk/tsk_tools_i.h"
+#include "tsk/auto/tsk_is_image_supported.h"
+#include <locale.h>
+#include <time.h>
+
+
+static TSK_TCHAR *progname;
+
+static void
+usage()
+{
+    TFPRINTF(stderr,
+        _TSK_T
+        ("usage: %s [-vV] [-i imgtype] [-b dev_sector_size] image\n"),
+        progname);
+    tsk_fprintf(stderr,
+        "\t-i imgtype: The format of the image file (use '-i list' for supported types)\n");
+    tsk_fprintf(stderr,
+        "\t-b dev_sector_size: The size (in bytes) of the device sectors\n");
+    tsk_fprintf(stderr, "\t-v: verbose output to stderr\n");
+    tsk_fprintf(stderr, "\t-V: Print version\n");
+    
+
+    exit(1);
+}
+
+int
+main(int argc, char **argv1)
+{
+    TSK_IMG_TYPE_ENUM imgtype = TSK_IMG_TYPE_DETECT;
+    int ch;
+    TSK_TCHAR **argv;
+    unsigned int ssize = 0;
+    TSK_TCHAR *cp;
+	bool do_hash = false;
+
+#ifdef TSK_WIN32
+    // On Windows, get the wide arguments (mingw doesn't support wmain)
+    argv = CommandLineToArgvW(GetCommandLineW(), &argc);
+    if (argv == NULL) {
+        fprintf(stderr, "Error getting wide arguments\n");
+        exit(1);
+    }
+#else
+    argv = (TSK_TCHAR **) argv1;
+#endif
+
+    progname = argv[0];
+    setlocale(LC_ALL, "");
+
+    while ((ch = GETOPT(argc, argv, _TSK_T("b:i:vV"))) > 0) {
+        switch (ch) {
+        case _TSK_T('?'):
+        default:
+            TFPRINTF(stderr, _TSK_T("Invalid argument: %s\n"),
+                argv[OPTIND]);
+            usage();
+            
+        case _TSK_T('b'):
+            ssize = (unsigned int) TSTRTOUL(OPTARG, &cp, 0);
+            if (*cp || *cp == *OPTARG || ssize < 1) {
+                TFPRINTF(stderr,
+                    _TSK_T
+                    ("invalid argument: sector size must be positive: %s\n"),
+                    OPTARG);
+                usage();
+            }
+            break;
+
+        case _TSK_T('i'):
+            if (TSTRCMP(OPTARG, _TSK_T("list")) == 0) {
+                tsk_img_type_print(stderr);
+                exit(1);
+            }
+            imgtype = tsk_img_type_toid(OPTARG);
+            if (imgtype == TSK_IMG_TYPE_UNSUPP) {
+                TFPRINTF(stderr, _TSK_T("Unsupported image type: %s\n"),
+                    OPTARG);
+                usage();
+            }
+            break;
+
+        case _TSK_T('v'):
+            tsk_verbose++;
+            break;
+
+        case _TSK_T('V'):
+            tsk_version_print(stdout);
+            exit(0);            
+        }
+    }
+
+    /* We need at least one more argument */
+    if (OPTIND >= argc) {
+        tsk_fprintf(stderr,
+            "Missing image name\n");
+        usage();
+    }
+
+    int imageCount = argc - OPTIND;
+    if (imageCount != 1) {
+        tsk_fprintf(stderr,
+            "Only one image supported\n");
+        usage();
+    }
+
+    TskIsImageSupported imageProcessor;
+    if (imageProcessor.openImage(imageCount, &argv[OPTIND], imgtype, ssize)) {
+        tsk_error_print(stderr);
+        exit(1);
+    }
+
+    // Process the image with findFilesInImg() to detect file systems and encryption;
+    // the return value is not needed here because printResults() reports the outcome.
+    imageProcessor.findFilesInImg();
+
+    imageProcessor.printResults();
+    
+    exit(0);
+}
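The option parsing above wraps a small API: open the image, walk it, and print what was detected. A minimal, hypothetical sketch of that TskIsImageSupported sequence without the CLI scaffolding (the helper name and hard-coded arguments are illustrative only):

```cpp
// Hypothetical condensed use of TskIsImageSupported, mirroring main() above.
#include "tsk/tsk_tools_i.h"
#include "tsk/auto/tsk_is_image_supported.h"

static int check_image(TSK_TCHAR *image_path) {
    TskIsImageSupported imageProcessor;
    // One image path, auto-detected format, default sector size.
    if (imageProcessor.openImage(1, &image_path, TSK_IMG_TYPE_DETECT, 0)) {
        tsk_error_print(stderr);
        return 1;
    }
    imageProcessor.findFilesInImg();   // walk the image, noting file systems / encryption
    imageProcessor.printResults();     // report whether TSK supports the image
    return 0;
}
```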
diff --git a/tools/autotools/tsk_recover.cpp b/tools/autotools/tsk_recover.cpp
index 997bd60d574aa497a80e6b65bb011a762a1b0f72..9558cb273852acc90c52a0283cef348284c633a8 100755
--- a/tools/autotools/tsk_recover.cpp
+++ b/tools/autotools/tsk_recover.cpp
@@ -288,9 +288,9 @@ uint8_t TskRecover::writeFile(TSK_FS_FILE * a_fs_file, const char *a_path)
     }
 
     if (fbuf[strlen(fbuf) - 1] != '/')
-        strncat(fbuf, "/", PATH_MAX - strlen(fbuf));
+        strncat(fbuf, "/", PATH_MAX - strlen(fbuf)-1);
 
-    strncat(fbuf, a_fs_file->name->name, PATH_MAX - strlen(fbuf));
+    strncat(fbuf, a_fs_file->name->name, PATH_MAX - strlen(fbuf)-1);
     
     //do name mangling of the file name that was just added
     for (int i = strlen(fbuf)-1; fbuf[i] != '/'; i--) {
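The -1 in the strncat() bounds above leaves room for the terminating NUL that strncat() always writes after the copied bytes; with the old bound of PATH_MAX - strlen(fbuf), a nearly full path buffer could be overrun by one byte. A minimal sketch of the arithmetic (illustrative buffer and names, not from the patch):

    #include <stdio.h>
    #include <string.h>

    #define DEMO_PATH_MAX 16   /* small bound to make the arithmetic visible */

    int main(void) {
        char fbuf[DEMO_PATH_MAX] = "/some/dir";
        /* strncat(dst, src, n) copies at most n bytes of src and then always
         * appends a NUL, so n must leave one byte of headroom. */
        size_t remaining = DEMO_PATH_MAX - strlen(fbuf) - 1;
        strncat(fbuf, "/file.txt", remaining);   /* truncates, stays in bounds */
        printf("%s\n", fbuf);                    /* "/some/dir/file." */
        return 0;
    }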
diff --git a/tools/fstools/fls.cpp b/tools/fstools/fls.cpp
index 41676b0b249f5ddbd4e3fa05edb46529b36cd563..fc0b91d837189c5b61fe90757843cacec16bb7e9 100644
--- a/tools/fstools/fls.cpp
+++ b/tools/fstools/fls.cpp
@@ -77,6 +77,8 @@ main(int argc, char **argv1)
 {
     TSK_IMG_TYPE_ENUM imgtype = TSK_IMG_TYPE_DETECT;
     TSK_IMG_INFO *img;
+    TSK_IMG_INFO *img_parent = NULL;
+    const TSK_POOL_INFO *pool = NULL;
 
     TSK_OFF_T imgaddr = 0;
     TSK_FS_TYPE_ENUM fstype = TSK_FS_TYPE_DETECT;
@@ -308,6 +310,7 @@ main(int argc, char **argv1)
             tsk_fprintf(stderr,
                 "Sector offset supplied is larger than disk image (maximum: %"
                 PRIu64 ")\n", img->size / img->sector_size);
+            tsk_img_close(img);
             exit(1);
         }
 
@@ -316,26 +319,29 @@ main(int argc, char **argv1)
                 tsk_error_print(stderr);
                 if (tsk_error_get_errno() == TSK_ERR_FS_UNSUPTYPE)
                     tsk_fs_type_print(stderr);
-                img->close(img);
+                tsk_img_close(img);
                 exit(1);
             }
         } else {
             // Pool block was specified, so open pool
-            const TSK_POOL_INFO *pool = tsk_pool_open_img_sing(img, imgaddr * img->sector_size, pooltype);
+            pool = tsk_pool_open_img_sing(img, imgaddr * img->sector_size, pooltype);
             if (pool == NULL) {
                 tsk_error_print(stderr);
                 if (tsk_error_get_errno() == TSK_ERR_FS_UNSUPTYPE)
                     tsk_pool_type_print(stderr);
-                img->close(img);
+                tsk_img_close(img);
                 exit(1);
             }
+            img_parent = img;
 
             img = pool->get_img_info(pool, (TSK_DADDR_T)pvol_block);
             if ((fs = tsk_fs_open_img_decrypt(img, imgaddr * img->sector_size, fstype, password)) == NULL) {
                 tsk_error_print(stderr);
                 if (tsk_error_get_errno() == TSK_ERR_FS_UNSUPTYPE)
                     tsk_fs_type_print(stderr);
-                img->close(img);
+                tsk_img_close(img);
+                tsk_pool_close(pool);
+                tsk_img_close(img_parent);
                 exit(1);
             }
         }
@@ -359,6 +365,7 @@ main(int argc, char **argv1)
             tsk_fprintf(stderr,
                 "Sector offset supplied is larger than disk image (maximum: %"
                 PRIu64 ")\n", img->size / img->sector_size);
+            tsk_img_close(img);
             exit(1);
         }
 
@@ -367,27 +374,30 @@ main(int argc, char **argv1)
                 tsk_error_print(stderr);
                 if (tsk_error_get_errno() == TSK_ERR_FS_UNSUPTYPE)
                     tsk_fs_type_print(stderr);
-                img->close(img);
+                tsk_img_close(img);
                 exit(1);
             }
         } else {
             // Pool block was specified, so open pool
-            const TSK_POOL_INFO *pool = tsk_pool_open_img_sing(img, imgaddr * img->sector_size, pooltype);
+            pool = tsk_pool_open_img_sing(img, imgaddr * img->sector_size, pooltype);
 
             if (pool == NULL) {
                 tsk_error_print(stderr);
                 if (tsk_error_get_errno() == TSK_ERR_FS_UNSUPTYPE)
                     tsk_pool_type_print(stderr);
-                img->close(img);
+                tsk_img_close(img);
                 exit(1);
             }
+            img_parent = img;
 
             img = pool->get_img_info(pool, (TSK_DADDR_T)pvol_block);
             if ((fs = tsk_fs_open_img_decrypt(img, imgaddr * img->sector_size, fstype, password)) == NULL) {
                 tsk_error_print(stderr);
                 if (tsk_error_get_errno() == TSK_ERR_FS_UNSUPTYPE)
                     tsk_fs_type_print(stderr);
-                img->close(img);
+                tsk_img_close(img);
+                tsk_pool_close(pool);
+                tsk_img_close(img_parent);
                 exit(1);
             }
         }
@@ -401,12 +411,24 @@ main(int argc, char **argv1)
             (TSK_FS_DIR_WALK_FLAG_ENUM) name_flags, macpre, sec_skew)) {
         tsk_error_print(stderr);
         fs->close(fs);
-        img->close(img);
+        tsk_img_close(img);
+        if (pool != NULL) {
+          tsk_pool_close(pool);
+        }
+        if (img_parent != NULL) {
+          tsk_img_close(img_parent);
+        }
         exit(1);
     }
 
     fs->close(fs);
-    img->close(img);
+    tsk_img_close(img);
 
+    if (pool != NULL) {
+      tsk_pool_close(pool);
+    }
+    if (img_parent != NULL) {
+      tsk_img_close(img_parent);
+    }
     exit(0);
 }
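For the pool case, fls now keeps three separate handles alive (the original image, the pool, and the pool-backed image returned by get_img_info()), and every exit path releases all of them through tsk_img_close() and tsk_pool_close(). A condensed sketch of that acquisition and teardown order (illustrative function name and simplified offsets; it reuses the same libtsk calls as the hunk above):

    #include "tsk/libtsk.h"

    static int open_and_close_pool_fs(const char *path, TSK_DADDR_T pvol_block) {
        const char *const paths[1] = { path };
        TSK_IMG_INFO *img_parent = tsk_img_open_utf8(1, paths, TSK_IMG_TYPE_DETECT, 0);
        if (img_parent == NULL)
            return 1;

        const TSK_POOL_INFO *pool =
            tsk_pool_open_img_sing(img_parent, 0, TSK_POOL_TYPE_DETECT);
        if (pool == NULL) {
            tsk_img_close(img_parent);
            return 1;
        }

        TSK_IMG_INFO *img = pool->get_img_info(pool, pvol_block);
        TSK_FS_INFO *fs = (img != NULL) ? tsk_fs_open_img(img, 0, TSK_FS_TYPE_DETECT) : NULL;
        int ret = (fs == NULL);
        if (fs != NULL)
            tsk_fs_close(fs);

        if (img != NULL)
            tsk_img_close(img);     /* pool-backed image first */
        tsk_pool_close(pool);       /* then the pool           */
        tsk_img_close(img_parent);  /* finally the original    */
        return ret;
    }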
diff --git a/tsk/Makefile.am b/tsk/Makefile.am
index b623dd937c4dcf9813746669216cba4dd314aa79..21d3605dc99dea7cf280646b25f1e1329c534de9 100644
--- a/tsk/Makefile.am
+++ b/tsk/Makefile.am
@@ -8,6 +8,6 @@ libtsk_la_LIBADD = base/libtskbase.la img/libtskimg.la \
     vs/libtskvs.la fs/libtskfs.la hashdb/libtskhashdb.la \
     auto/libtskauto.la pool/libtskpool.la util/libtskutil.la
 # current:revision:age
-libtsk_la_LDFLAGS = -version-info 20:5:1 $(LIBTSK_LDFLAGS)
+libtsk_la_LDFLAGS = -version-info 20:6:1 $(LIBTSK_LDFLAGS)
 
 EXTRA_DIST = tsk_tools_i.h docs/Doxyfile docs/*.dox docs/*.html
diff --git a/tsk/auto/auto.cpp b/tsk/auto/auto.cpp
index b80c2832a88a59b6ecec896d1cce7108d3dbe683..8f633247d8159c1357f4e5943a84317080183805 100755
--- a/tsk/auto/auto.cpp
+++ b/tsk/auto/auto.cpp
@@ -322,8 +322,12 @@ TskAuto::findFilesInVs(TSK_OFF_T a_start, TSK_VS_TYPE_ENUM a_vtype)
     TSK_VS_INFO *vs_info;
     // Use mm_walk to get the volumes
     if ((vs_info = tsk_vs_open(m_img_info, a_start, a_vtype)) == NULL) {
-        /* we're going to ignore this error to avoid confusion if the
-         * fs_open passes. */
+
+        /* If the error code is for encryption, we will register it. Otherwise,
+         * ignore this error to avoid confusion if the fs_open passes. */
+        if (tsk_error_get_errno() == TSK_ERR_VS_ENCRYPTED) {
+            registerError();
+        }
         tsk_error_reset();
 
         if(tsk_verbose)
@@ -468,11 +472,22 @@ TskAuto::findFilesInPool(TSK_OFF_T start, TSK_POOL_TYPE_ENUM ptype)
                         }
                     }
                     else {
+                        if (vol_info->flags & TSK_POOL_VOLUME_FLAG_ENCRYPTED) {
+                            tsk_error_reset();
+                            tsk_error_set_errno(TSK_ERR_FS_ENCRYPTED);
+                            tsk_error_set_errstr(
+                                "Encrypted APFS file system");
+                            tsk_error_set_errstr2("Block: %" PRIdOFF, vol_info->block);
+                            registerError();
+                        }
+                        else {
+                            tsk_error_set_errstr2(
+                                "findFilesInPool: Error opening APFS file system");
+                            registerError();
+                        }
+
                         pool_img->close(pool_img);
                         pool->close(pool);
-                        tsk_error_set_errstr2(
-                            "findFilesInPool: Error opening APFS file system");
-                        registerError();
                         return TSK_ERR;
                     }
 
diff --git a/tsk/auto/is_image_supported.cpp b/tsk/auto/is_image_supported.cpp
index 84a50627ce253c31bf85b803aaa3030a1a9488fb..a5aced7ebc18a94ec918227be28c21c85d1007b9 100644
--- a/tsk/auto/is_image_supported.cpp
+++ b/tsk/auto/is_image_supported.cpp
@@ -2,7 +2,7 @@
  ** The Sleuth Kit
  **
  ** Brian Carrier [carrier <at> sleuthkit [dot] org]
- ** Copyright (c) 2010-2013 Brian Carrier.  All Rights reserved
+ ** Copyright (c) 2010-2021 Brian Carrier.  All Rights reserved
  **
  ** This software is distributed under the Common Public License 1.0
  **
@@ -25,40 +25,131 @@
 TskIsImageSupported::TskIsImageSupported()
 {
     m_wasDataFound = false;
+    m_wasEncryptionFound = false;
+    m_wasPossibleEncryptionFound = false;
+    m_wasFileSystemFound = false;
+    m_wasUnsupported = false;
+    m_encryptionDesc[0] = '\0';
+    m_possibleEncryptionDesc[0] = '\0';
+    m_unsupportedDesc[0] = '\0';
 }
 
 bool TskIsImageSupported::isImageSupported()
 {
-    return m_wasDataFound ;
+    return m_wasDataFound;
 }
 
+bool TskIsImageSupported::isImageEncrypted()
+{
+    return m_wasEncryptionFound;
+}
+
+void TskIsImageSupported::printResults() {
+
+    printf("Encryption: ");
+    if (!m_wasEncryptionFound && !m_wasPossibleEncryptionFound) {
+        printf("None");
+    }
+    else if (m_wasEncryptionFound) {
+        if (m_wasFileSystemFound) {
+            printf("Partial");
+        }
+        else {
+            printf("Full Disk");
+        }
+    }
+    else {
+        if (m_wasFileSystemFound) {
+            printf("Possible Partial");
+        }
+        else {
+            printf("Possible Full Disk");
+        }
+    }
+    printf("\n");
+
+    printf("Encryption Type: ");
+    if (strnlen(m_encryptionDesc, 1024) > 0) {
+        printf("%s", m_encryptionDesc);
+    } 
+    else if (strnlen(m_possibleEncryptionDesc, 1024) > 0) {
+        printf("%s", m_possibleEncryptionDesc);
+    }
+    else {
+        printf("None");
+    }
+    printf("\n");
+
+
+    printf("TSK Support: ");
+    if (m_wasFileSystemFound) {
+        printf("Yes");
+    }
+    else {
+        printf("No");
+        if (strnlen(m_unsupportedDesc, 1024) > 0) {
+            printf(" (%s)", m_unsupportedDesc);
+        }
+    }
+    printf("\n");
+}
 
 uint8_t TskIsImageSupported::handleError() 
 {
-    // we don't care about errors for this use case
-    //fprintf(stderr, "%s", tsk_error_get());
+    // If encryption was found, update the flags
+    TSK_ERROR_INFO* lastError = tsk_error_get_info();
+    if (lastError != NULL) {
+        uint32_t errCode = lastError->t_errno;
+
+        if (errCode == TSK_ERR_FS_ENCRYPTED || errCode == TSK_ERR_VS_ENCRYPTED) {
+            strncpy(m_encryptionDesc, lastError->errstr, 1024);
+            m_wasEncryptionFound = true;
+        }
+        else if (errCode == TSK_ERR_FS_POSSIBLY_ENCRYPTED) {
+            strncpy(m_possibleEncryptionDesc, lastError->errstr, 1024);
+            m_wasPossibleEncryptionFound = true;
+        }
+        else if (errCode == TSK_ERR_IMG_UNSUPTYPE) {
+            strncpy(m_unsupportedDesc, lastError->errstr, 1024);
+            m_wasUnsupported = true;
+        }
+    }
     return 0;
 }
 
-
 TSK_RETVAL_ENUM TskIsImageSupported::processFile(TSK_FS_FILE * /*fs_file*/,
                                                  const char * /*path*/)
 {
     return TSK_OK;
 }
 
-
 TSK_FILTER_ENUM
 TskIsImageSupported::filterFs(TSK_FS_INFO * /*fs_info*/)
 {
     m_wasDataFound = true;
+    m_wasFileSystemFound = true;
     return TSK_FILTER_SKIP;
 }
 
+TSK_FILTER_ENUM
+TskIsImageSupported::filterPool(const TSK_POOL_INFO * pool_info)
+{
+    // There's nothing to do, but we need to override this to allow the pool
+    // to be processed.
+    return TSK_FILTER_CONT;
+}
+
+TSK_FILTER_ENUM
+TskIsImageSupported::filterPoolVol(const TSK_POOL_VOLUME_INFO * pool_vol)
+{
+    // There's nothing to do, but we need to override this to allow the pool
+    // to be processed.
+    return TSK_FILTER_CONT;
+}
 
 TSK_FILTER_ENUM
 TskIsImageSupported::filterVol(const TSK_VS_PART_INFO * /*vs_part*/)
 {
     m_wasDataFound = true;
-    return TSK_FILTER_SKIP;
+    return TSK_FILTER_CONT;
 }
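With the new accessors declared in tsk_is_image_supported.h, the class can also be driven directly rather than through the tsk_imageinfo tool. A small usage sketch (hypothetical wrapper function, not part of the patch):

    #include "tsk/auto/tsk_is_image_supported.h"

    // Returns true only if TSK found a file system and saw no encryption.
    static bool image_is_usable(const TSK_TCHAR *path) {
        TskIsImageSupported checker;
        if (checker.openImage(1, &path, TSK_IMG_TYPE_DETECT, 0))
            return false;                 // image could not be opened at all
        checker.findFilesInImg();         // walks VS/pool/FS layers, recording errors
        checker.printResults();           // same report tsk_imageinfo prints
        return checker.isImageSupported() && !checker.isImageEncrypted();
    }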
diff --git a/tsk/auto/tsk_db.h b/tsk/auto/tsk_db.h
index 0976f70e8be16488527d02bc240fe50e9bdf54d0..3724638dc68004ce34925615ff9667245e6f60e9 100755
--- a/tsk/auto/tsk_db.h
+++ b/tsk/auto/tsk_db.h
@@ -28,10 +28,10 @@ using std::vector;
 using std::string;
 
 /**
- * Keep these values in sync with CURRENT_DB_SCHEMA_VERSION in SleuthkitCase.java
- */
+ * Do not change these values - the schema is no longer being updated in this code and does not match the current version in SleuthkitCase.java.
+ */
 #define TSK_SCHEMA_VER 8
-#define TSK_SCHEMA_MINOR_VER 6
+#define TSK_SCHEMA_MINOR_VER 4
 
 /**
  * Values for the type column in the tsk_objects table. 
diff --git a/tsk/auto/tsk_is_image_supported.h b/tsk/auto/tsk_is_image_supported.h
index 9993d8226dbe46894f826fa426c48de7ac2accd2..733abb0b14be2fbb0fefd8eed8a942d18bc560af 100644
--- a/tsk/auto/tsk_is_image_supported.h
+++ b/tsk/auto/tsk_is_image_supported.h
@@ -2,7 +2,7 @@
  ** The Sleuth Kit
  ** 
  ** Brian Carrier [carrier <at> sleuthkit [dot] org]
- ** Copyright (c) 2010-2013 Brian Carrier.  All Rights reserved
+ ** Copyright (c) 2010-2021 Brian Carrier.  All Rights reserved
  **
  ** This software is distributed under the Common Public License 1.0
  **
@@ -22,16 +22,30 @@
 
 
 #include "tsk/tsk_tools_i.h"
+#include "tsk/util/detect_encryption.h"
 
 class TskIsImageSupported:public TskAuto {
+
+
 public:
     TskIsImageSupported();
     virtual TSK_RETVAL_ENUM processFile(TSK_FS_FILE * fs_file, const char *path);
     virtual TSK_FILTER_ENUM filterVol(const TSK_VS_PART_INFO * vs_part);
+    virtual TSK_FILTER_ENUM filterPool(const TSK_POOL_INFO * pool_info);
+    virtual TSK_FILTER_ENUM filterPoolVol(const TSK_POOL_VOLUME_INFO * pool_vol);
     virtual TSK_FILTER_ENUM filterFs(TSK_FS_INFO * fs_info);
     virtual uint8_t handleError();
     bool isImageSupported();
+    bool isImageEncrypted();
+    void printResults();
     
 private:
     bool m_wasDataFound;
+    bool m_wasEncryptionFound;
+    bool m_wasPossibleEncryptionFound;
+    bool m_wasFileSystemFound;
+    bool m_wasUnsupported;
+    char m_encryptionDesc[1024];
+    char m_possibleEncryptionDesc[1024];
+    char m_unsupportedDesc[1024];
 };
\ No newline at end of file
diff --git a/tsk/base/mymalloc.c b/tsk/base/mymalloc.c
index 8c6b910d93a3f1258e8dc408eaccca75e4e5c3ce..efbf0aadcbd466518729625a4df7589e81d4c7eb 100644
--- a/tsk/base/mymalloc.c
+++ b/tsk/base/mymalloc.c
@@ -11,7 +11,7 @@
  * when an error occurs.
  */
 
-/*	The IBM Public Licence must be distributed with this software.
+/*	The IBM Public License must be distributed with this software.
 * AUTHOR(S)
 *	Wietse Venema
 *	IBM T.J. Watson Research
diff --git a/tsk/base/tsk_base.h b/tsk/base/tsk_base.h
index 1898eee3d3ef07a3479740a4bf99c9e6a599dce4..9c61526894d00d0a0b47816829be143b6a59de51 100644
--- a/tsk/base/tsk_base.h
+++ b/tsk/base/tsk_base.h
@@ -39,11 +39,11 @@
  * 3.1.2b1 would be 0x03010201.  Snapshot from Jan 2, 2003 would be
  * 0xFF030102.
  * See TSK_VERSION_STR for string form. */
-#define TSK_VERSION_NUM 0x041002ff
+#define TSK_VERSION_NUM 0x041100ff
 
 /** Version of code in string form. See TSK_VERSION_NUM for
  * integer form. */
-#define TSK_VERSION_STR "4.10.2"
+#define TSK_VERSION_STR "4.11.0"
 
 
 /* include the TSK-specific header file that we created in autoconf
@@ -332,7 +332,8 @@ extern "C" {
 #define TSK_ERR_VS_BUF		(TSK_ERR_VS | 5)
 #define TSK_ERR_VS_BLK_NUM	(TSK_ERR_VS | 6)
 #define TSK_ERR_VS_ARG	    (TSK_ERR_VS | 7)
-#define TSK_ERR_VS_MAX		8
+#define TSK_ERR_VS_ENCRYPTED    (TSK_ERR_VS | 8)
+#define TSK_ERR_VS_MAX		9
 
 #define TSK_ERR_POOL_UNKTYPE    (TSK_ERR_POOL | 0)
 #define TSK_ERR_POOL_UNSUPTYPE  (TSK_ERR_IMG | 1)
@@ -358,8 +359,9 @@ extern "C" {
 #define TSK_ERR_FS_GENFS	(TSK_ERR_FS | 15)
 #define TSK_ERR_FS_CORRUPT	(TSK_ERR_FS | 16)
 #define TSK_ERR_FS_ATTR_NOTFOUND (TSK_ERR_FS | 17)
-#define TSK_ERR_FS_MAX		18
-
+#define TSK_ERR_FS_ENCRYPTED    (TSK_ERR_FS | 18)
+#define TSK_ERR_FS_POSSIBLY_ENCRYPTED    (TSK_ERR_FS | 19)
+#define TSK_ERR_FS_MAX		20
 
 #define TSK_ERR_HDB_UNKTYPE     (TSK_ERR_HDB | 0)
 #define TSK_ERR_HDB_UNSUPTYPE   (TSK_ERR_HDB | 1)
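Per the layout documented above, each decimal component of the version is written directly as hex digits, with 0xff in the low byte marking a final release, so 4.11.0 packs to 0x041100ff. A tiny decode sketch (illustrative, assuming that layout):

    #include <stdio.h>

    #define DEMO_TSK_VERSION_NUM 0x041100ff   /* value set by this release */

    int main(void) {
        unsigned v = DEMO_TSK_VERSION_NUM;
        printf("major=%x minor=%x patch=%x tag=%02x\n",
               (v >> 24) & 0xffu, (v >> 16) & 0xffu, (v >> 8) & 0xffu, v & 0xffu);
        /* prints: major=4 minor=11 patch=0 tag=ff  (ff == released version) */
        return 0;
    }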
diff --git a/tsk/base/tsk_error.c b/tsk/base/tsk_error.c
index 9648db5154addf156240924a9d2d64169c9b2ef0..cda9c369397ce2bb639415eab431cbb6a15e6fcc 100644
--- a/tsk/base/tsk_error.c
+++ b/tsk/base/tsk_error.c
@@ -55,6 +55,7 @@ static const char *tsk_err_mm_str[TSK_ERR_VS_MAX] = {
     "Invalid buffer size",      // 5
     "Invalid sector address",
     "Invalid API argument",
+    "Encryption detected",
 };
 
 static const char *tsk_err_fs_str[TSK_ERR_FS_MAX] = {
@@ -76,6 +77,8 @@ static const char *tsk_err_fs_str[TSK_ERR_FS_MAX] = {
     "General file system error",        // 15
     "File system is corrupt",
     "Attribute not found in file",
+    "Encryption detected",
+    "Possible encryption detected",
 };
 
 static const char *tsk_err_hdb_str[TSK_ERR_HDB_MAX] = {
diff --git a/tsk/docs/Doxyfile b/tsk/docs/Doxyfile
index a9062b9660712e6c2df99fa5d3122c73f480d7e1..d0f18c89ee26546ab484c3b59396ba1c0c52db34 100644
--- a/tsk/docs/Doxyfile
+++ b/tsk/docs/Doxyfile
@@ -33,7 +33,7 @@ PROJECT_NAME           = "The Sleuth Kit"
 # if some version control system is used.
 
 # This is automatically updated  at release time. 
-PROJECT_NUMBER = 4.10.2
+PROJECT_NUMBER = 4.11.0
 
 # Using the PROJECT_BRIEF tag one can provide an optional one line description
 # for a project that appears at the top of each page and should give viewer
@@ -883,7 +883,7 @@ GENERATE_HTML          = YES
 # put in front of it. If left blank `html' will be used as the default path.
 
 # NOTE: This is automatically updated at release time. 
-HTML_OUTPUT = api-docs/4.10.2/
+HTML_OUTPUT = api-docs/4.11.0/
 
 # The HTML_FILE_EXTENSION tag can be used to specify the file extension for
 # each generated HTML page (for example: .htm,.php,.asp). If it is left blank
diff --git a/tsk/fs/apfs_compat.cpp b/tsk/fs/apfs_compat.cpp
index 1ec0b2546e75abd3234b5443a25ebbbf1d49ef8a..4afacd5cb48f97a194d4bcced74c9962425e33aa 100755
--- a/tsk/fs/apfs_compat.cpp
+++ b/tsk/fs/apfs_compat.cpp
@@ -692,6 +692,7 @@ uint8_t APFSFSCompat::file_add_meta(TSK_FS_FILE* fs_file, TSK_INUM_T addr) const
     for (int i = 0; i < num_attrs; i++) {
       const auto attr = tsk_fs_file_attr_get_idx(fs_file, i);
       if (attr->type == TSK_FS_ATTR_TYPE_APFS_EXT_ATTR &&
+          attr->name != NULL &&
           strcmp(attr->name, APFS_XATTR_NAME_SYMLINK) == 0) {
         // We've found our symlink attribute
         fs_file->meta->link = (char*)tsk_malloc(attr->size + 1);
diff --git a/tsk/fs/decmpfs.c b/tsk/fs/decmpfs.c
index 7e522172ef21c3230ca4d4d46258a33c67025cf5..ff59281bf8a679aca2f4cc55cb9a8d792ecbdf36 100644
--- a/tsk/fs/decmpfs.c
+++ b/tsk/fs/decmpfs.c
@@ -1,3 +1,10 @@
+/* This file contains decompression routines used by APFS and HFS.
+ * It has one method derived from the public domain zpipe.c example
+ * (distributed with zlib) and others that are TSK-specific.
+ *
+ * It would probably be cleaner to separate these into two files.
+ */
+
 #include "../libtsk.h"
 #include "tsk_fs_i.h"
 #include "decmpfs.h"
@@ -13,7 +20,13 @@
 
 /***************** ZLIB stuff *******************************/
 
-// Adapted from zpipe.c (part of zlib) at http://zlib.net/zpipe.c
+/* The zlib_inflate method is adapted from the public domain
+ * zpipe.c (part of zlib) at http://zlib.net/zpipe.c
+ *
+ * zpipe.c: example of proper use of zlib's inflate() and deflate()
+ * Not copyrighted -- provided to the public domain
+ * Version 1.4  11 December 2005  Mark Adler */
+
 #define CHUNK 16384
 
 /*
@@ -140,6 +153,20 @@ zlib_inflate(char *source, uint64_t sourceLen, char *dest, uint64_t destLen, uin
 
 #endif
 
+
+
+/********************* TSK STUFF **********************/
+
+/*
+ * The Sleuth Kit
+ *
+ * Brian Carrier [carrier <at> sleuthkit [dot] org]
+ * Copyright (c) 2019-2020 Brian Carrier.  All Rights reserved
+ * Copyright (c) 2018-2019 BlackBag Technologies.  All Rights reserved
+ *
+ * This software is distributed under the Common Public License 1.0
+ */
+
 typedef struct {
     uint32_t offset;
     uint32_t length;
diff --git a/tsk/fs/decmpfs.h b/tsk/fs/decmpfs.h
index 4ab40fd181472afbe135776ced488926dfd409c5..2bcfce8e1110c797979098c82047f984db6da600 100644
--- a/tsk/fs/decmpfs.h
+++ b/tsk/fs/decmpfs.h
@@ -1,3 +1,12 @@
+/*
+ * The Sleuth Kit
+ *
+ * Brian Carrier [carrier <at> sleuthkit [dot] org]
+ * Copyright (c) 2018-2019 BlackBag Technologies.  All Rights reserved
+ *
+ * This software is distributed under the Common Public License 1.0
+ */
+
 #pragma once
 
 #include <stdint.h>
@@ -8,6 +17,11 @@
 extern "C" {
 #endif
 
+/**
+ * Contains the structures and function APIs dealing with compressed files
+ * in APFS and HFS+ file systems.
+ */
+
 /*
  * If a file is compressed, then it will have an extended attribute
  * with name com.apple.decmpfs.  The value of that attribute is a data
diff --git a/tsk/fs/exfatfs_meta.c b/tsk/fs/exfatfs_meta.c
index cd895319a43471f2c6305800ad38cef0108e0d96..71bbcf152382f4bf0aaa743e911d0c9586bf1978 100755
--- a/tsk/fs/exfatfs_meta.c
+++ b/tsk/fs/exfatfs_meta.c
@@ -50,12 +50,10 @@ exfatfs_is_cluster_alloc(FATFS_INFO *a_fatfs, TSK_DADDR_T a_cluster_addr)
     uint8_t bitmap_byte;
     ssize_t bytes_read = 0;
 
-    assert(a_fatfs != NULL);
     if (fatfs_ptr_arg_is_null(a_fatfs, "a_fatfs", func_name)) {
         return -1;
     }
 
-    assert((a_cluster_addr >= FATFS_FIRST_CLUSTER_ADDR) && (a_cluster_addr <= a_fatfs->lastclust));
     if ((a_cluster_addr < FATFS_FIRST_CLUSTER_ADDR) || (a_cluster_addr > a_fatfs->lastclust)) {
         tsk_error_reset();
         tsk_error_set_errno(TSK_ERR_FS_ARG);
@@ -109,7 +107,6 @@ exfatfs_is_vol_label_dentry(FATFS_DENTRY *a_dentry, FATFS_DATA_UNIT_ALLOC_STATUS
     EXFATFS_VOL_LABEL_DIR_ENTRY *dentry = (EXFATFS_VOL_LABEL_DIR_ENTRY*)a_dentry;
     uint8_t i = 0;
     
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -176,7 +173,6 @@ exfatfs_is_vol_guid_dentry(FATFS_DENTRY *a_dentry, FATFS_DATA_UNIT_ALLOC_STATUS_
     const char *func_name = "exfatfs_is_vol_guid_dentry";
     EXFATFS_VOL_GUID_DIR_ENTRY *dentry = (EXFATFS_VOL_GUID_DIR_ENTRY*)a_dentry;
     
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -217,7 +213,6 @@ exfatfs_is_alloc_bitmap_dentry(FATFS_DENTRY *a_dentry, FATFS_DATA_UNIT_ALLOC_STA
     uint32_t first_cluster_of_bitmap = 0;
     uint64_t length_of_alloc_bitmap_in_bytes = 0;
 
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -297,7 +292,6 @@ exfatfs_is_upcase_table_dentry(FATFS_DENTRY *a_dentry, FATFS_DATA_UNIT_ALLOC_STA
     uint64_t table_size = 0;
     uint32_t first_cluster_of_table = 0;
 
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -377,7 +371,6 @@ exfatfs_is_texfat_dentry(FATFS_DENTRY *a_dentry, FATFS_DATA_UNIT_ALLOC_STATUS_EN
     const char *func_name = "exfatfs_is_texfat_dentry";
     EXFATFS_TEXFAT_DIR_ENTRY *dentry = (EXFATFS_TEXFAT_DIR_ENTRY*)a_dentry;
     
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -412,7 +405,6 @@ exfatfs_is_access_ctrl_table_dentry(FATFS_DENTRY *a_dentry, FATFS_DATA_UNIT_ALLO
     const char *func_name = "exfatfs_is_texfat_dentry";
     EXFATFS_TEXFAT_DIR_ENTRY *dentry = (EXFATFS_TEXFAT_DIR_ENTRY*)a_dentry;
     
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -476,7 +468,6 @@ exfatfs_is_file_dentry_standalone(FATFS_DENTRY *a_dentry, TSK_ENDIAN_ENUM a_endi
     const char *func_name = "exfatfs_is_file_dentry";
     EXFATFS_FILE_DIR_ENTRY *dentry = (EXFATFS_FILE_DIR_ENTRY*)a_dentry;
 
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -587,7 +578,6 @@ exfatfs_is_file_stream_dentry_standalone(FATFS_DENTRY *a_dentry, TSK_ENDIAN_ENUM
     uint64_t file_size = 0;
     uint32_t first_cluster = 0;
 
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -647,7 +637,6 @@ exfatfs_is_file_name_dentry(FATFS_DENTRY *a_dentry)
     const char *func_name = "exfatfs_is_file_name_dentry";
     EXFATFS_FILE_NAME_DIR_ENTRY *dentry = (EXFATFS_FILE_NAME_DIR_ENTRY*)a_dentry;
     
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
@@ -675,7 +664,6 @@ exfatfs_is_dentry(FATFS_INFO *a_fatfs, FATFS_DENTRY *a_dentry, FATFS_DATA_UNIT_A
 {
     const char *func_name = "exfatfs_is_dentry";
 
-    assert(a_dentry != NULL);
     if (fatfs_ptr_arg_is_null(a_dentry, "a_dentry", func_name)) {
         return 0;
     }
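The exfatfs changes drop the assert() calls and rely solely on the existing runtime null/range checks: with -DNDEBUG (typical of release builds) an assert compiles away entirely, so the runtime check that records a TSK error and returns is the only guard that actually fires. A minimal sketch of the pattern (hypothetical helper standing in for fatfs_ptr_arg_is_null()):

    #include <stddef.h>

    static int is_null_arg(const void *ptr) {
        return ptr == NULL;      /* the real helper also sets a TSK error string */
    }

    int process_dentry(const void *a_dentry) {
        /* assert(a_dentry != NULL);  -- removed: a no-op under NDEBUG */
        if (is_null_arg(a_dentry))
            return 0;            /* error already reported by the checker */
        /* ... normal processing ... */
        return 1;
    }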
diff --git a/tsk/fs/ext2fs.c b/tsk/fs/ext2fs.c
index cc42ff5f51c61976053dad83a9fb5bfdaa321d37..29bed30ce79d2e726a1a21de0e64a613ab6dbb65 100755
--- a/tsk/fs/ext2fs.c
+++ b/tsk/fs/ext2fs.c
@@ -427,6 +427,9 @@ static uint8_t
         return 1;
     }
 
+    // Ensure the bitmap buffer is initialized.
+    memset(ext2fs->imap_buf, 0, fs->block_size);
+
     cnt = tsk_fs_read(fs, addr * fs->block_size, 
         (char *) ext2fs->imap_buf, ext2fs->fs_info.block_size);
 
@@ -650,7 +653,7 @@ ext4_load_attrs_inline(TSK_FS_FILE *fs_file, const uint8_t * ea_buf, size_t ea_b
             if (index + sizeof(ext2fs_ea_entry) + strlen("data") > ea_buf_len) {
                 break;
             }
-            ext2fs_ea_entry *ea_entry = (ext2fs_ea_entry*) &(ea_buf[index]);
+            ea_entry = (ext2fs_ea_entry*) &(ea_buf[index]);
         }
     }
 
@@ -674,7 +677,9 @@ ext4_load_attrs_inline(TSK_FS_FILE *fs_file, const uint8_t * ea_buf, size_t ea_b
         memcpy(resident_data + inode_data_len, ea_inline_data, ea_data_len);
     }
 
-    fs_meta->attr = tsk_fs_attrlist_alloc();
+    if (fs_meta->attr == NULL) {
+        fs_meta->attr = tsk_fs_attrlist_alloc();
+    }
     if ((fs_attr =
         tsk_fs_attrlist_getnew(fs_meta->attr,
             TSK_FS_ATTR_RES)) == NULL) {
@@ -978,7 +983,7 @@ ext2fs_dinode_copy(EXT2FS_INFO * ext2fs, TSK_FS_FILE * fs_file,
     /*
      * Ensure that inum - ibase refers to a valid bit offset in imap_buf.
      */
-    if ((inum - ibase) > fs->block_size*8) {
+    if ((ibase > inum) || (inum - ibase) >= (fs->block_size * 8)) {
         tsk_release_lock(&ext2fs->lock);
         tsk_error_reset();
         tsk_error_set_errno(TSK_ERR_FS_WALK_RNG);
@@ -1155,11 +1160,15 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
 
     }
 
-    if ((fs_file = tsk_fs_file_alloc(fs)) == NULL)
+    fs_file = tsk_fs_file_alloc(fs);
+    if (fs_file == NULL)
         return 1;
-    if ((fs_file->meta =
-            tsk_fs_meta_alloc(EXT2FS_FILE_CONTENT_LEN)) == NULL)
+
+    fs_file->meta = tsk_fs_meta_alloc(EXT2FS_FILE_CONTENT_LEN);
+    if (fs_file->meta == NULL) {
+        free(fs_file);
         return 1;
+    }
 
     // we need to handle fs->last_inum specially because it is for the
     // virtual ORPHANS directory.  Handle it outside of the loop.
@@ -1174,7 +1183,11 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
     size =
         ext2fs->inode_size >
         sizeof(ext2fs_inode) ? ext2fs->inode_size : sizeof(ext2fs_inode);
-    if ((dino_buf = (ext2fs_inode *) tsk_malloc(size)) == NULL) {
+
+    dino_buf = (ext2fs_inode *) tsk_malloc(size);
+    if (dino_buf == NULL) {
+        free(fs_file->meta);
+        free(fs_file);
         return 1;
     }
 
@@ -1196,6 +1209,7 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
         if (ext2fs_imap_load(ext2fs, grp_num)) {
             tsk_release_lock(&ext2fs->lock);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
             return 1;
         }
         ibase =
@@ -1205,9 +1219,11 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
         /*
          * Ensure that inum - ibase refers to a valid bit offset in imap_buf.
          */
-        if ((inum - ibase) > fs->block_size*8) {
+        if ((ibase > inum) || (inum - ibase) >= (fs->block_size * 8)) {
             tsk_release_lock(&ext2fs->lock);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
+
             tsk_error_reset();
             tsk_error_set_errno(TSK_ERR_FS_WALK_RNG);
             tsk_error_set_errstr("%s: Invalid offset into imap_buf (inum %" PRIuINUM " - ibase %" PRIuINUM ")",
@@ -1227,8 +1243,9 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
             continue;
 
         if (ext2fs_dinode_load(ext2fs, inum, dino_buf, &ea_buf, &ea_buf_len)) {
-            tsk_fs_file_close(fs_file);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
+
             return 1;
         }
 
@@ -1257,20 +1274,23 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
          * to the application.
          */
         if (ext2fs_dinode_copy(ext2fs, fs_file, inum, dino_buf, ea_buf, ea_buf_len)) {
-            tsk_fs_meta_close(fs_file->meta);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
+
             return 1;
         }
 
         retval = a_action(fs_file, a_ptr);
         if (retval == TSK_WALK_STOP) {
-            tsk_fs_file_close(fs_file);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
+
             return 0;
         }
         else if (retval == TSK_WALK_ERROR) {
-            tsk_fs_file_close(fs_file);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
+
             return 1;
         }
     }
@@ -1282,8 +1302,9 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
         int retval;
 
         if (tsk_fs_dir_make_orphan_dir_meta(fs, fs_file->meta)) {
-            tsk_fs_file_close(fs_file);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
+
             return 1;
         }
         /* call action */
@@ -1291,11 +1312,14 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
         if (retval == TSK_WALK_STOP) {
             tsk_fs_file_close(fs_file);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
+
             return 0;
         }
         else if (retval == TSK_WALK_ERROR) {
-            tsk_fs_file_close(fs_file);
             free(dino_buf);
+            tsk_fs_file_close(fs_file);
+
             return 1;
         }
     }
@@ -1303,8 +1327,8 @@ ext2fs_inode_walk(TSK_FS_INFO * fs, TSK_INUM_T start_inum,
     /*
      * Cleanup.
      */
-    tsk_fs_file_close(fs_file);
     free(dino_buf);
+    tsk_fs_file_close(fs_file);
 
     return 0;
 }
@@ -1637,9 +1661,16 @@ ext2fs_make_data_run_extent_index(TSK_FS_INFO * fs_info,
 
     /* process leaf nodes */
     if (tsk_getu16(fs_info->endian, header->eh_depth) == 0) {
+        uint16_t num_entries = tsk_getu16(fs_info->endian, header->eh_entries);
+
+        // Ensure buf is sufficiently large
+        // Otherwise extents[i] below can cause an OOB read
+        if ((fs_blocksize < sizeof(ext2fs_extent_header)) || (num_entries > (fs_blocksize - sizeof(ext2fs_extent_header)) / sizeof(ext2fs_extent))) {
+            free(buf);
+            return 1;
+        }
         ext2fs_extent *extents = (ext2fs_extent *) (header + 1);
-        for (i = 0; i < tsk_getu16(fs_info->endian, header->eh_entries);
-            i++) {
+        for (i = 0; i < num_entries; i++) {
             ext2fs_extent extent = extents[i];
             if (ext2fs_make_data_run_extent(fs_info, fs_attr, &extent)) {
                 free(buf);
@@ -1649,9 +1680,16 @@ ext2fs_make_data_run_extent_index(TSK_FS_INFO * fs_info,
     }
     /* recurse on interior nodes */
     else {
+        uint16_t num_entries = tsk_getu16(fs_info->endian, header->eh_entries);
+
+        // Ensure buf is sufficiently large
+        // Otherwise indices[i] below can cause an OOB read
+        if ((fs_blocksize < sizeof(ext2fs_extent_header)) || (num_entries > (fs_blocksize - sizeof(ext2fs_extent_header)) / sizeof(ext2fs_extent_idx))) {
+            free(buf);
+            return 1;
+        }
         ext2fs_extent_idx *indices = (ext2fs_extent_idx *) (header + 1);
-        for (i = 0; i < tsk_getu16(fs_info->endian, header->eh_entries);
-            i++) {
+        for (i = 0; i < num_entries; i++) {
             ext2fs_extent_idx *index = &indices[i];
             TSK_DADDR_T child_block =
                 (((uint32_t) tsk_getu16(fs_info->endian,
@@ -1861,10 +1899,9 @@ ext4_load_attrs_extents(TSK_FS_FILE *fs_file)
     }
     
     if (depth == 0) {       /* leaf node */
-        if (num_entries >
-            (fs_info->block_size -
-             sizeof(ext2fs_extent_header)) /
-            sizeof(ext2fs_extent)) {
+        // Ensure fs_meta->content_ptr is sufficiently large
+        // Otherwise extents[i] below can cause an OOB read
+        if ((fs_meta->content_len < sizeof(ext2fs_extent_header)) || (num_entries > (fs_meta->content_len - sizeof(ext2fs_extent_header)) / sizeof(ext2fs_extent))) {
             tsk_error_set_errno(TSK_ERR_FS_INODE_COR);
             tsk_error_set_errstr
             ("ext2fs_load_attr: Inode reports too many extents");
@@ -1882,11 +1919,10 @@ ext4_load_attrs_extents(TSK_FS_FILE *fs_file)
     else {                  /* interior node */
         TSK_FS_ATTR *fs_attr_extent;
         int32_t extent_index_size;
-        
-        if (num_entries >
-            (fs_info->block_size -
-             sizeof(ext2fs_extent_header)) /
-            sizeof(ext2fs_extent_idx)) {
+
+        // Ensure fs_meta->content_ptr is sufficiently large
+        // Otherwise indices[i] below can cause an OOB read
+        if ((fs_meta->content_len < sizeof(ext2fs_extent_header)) || (num_entries > (fs_meta->content_len - sizeof(ext2fs_extent_header)) / sizeof(ext2fs_extent_idx))) {
             tsk_error_set_errno(TSK_ERR_FS_INODE_COR);
             tsk_error_set_errstr
             ("ext2fs_load_attr: Inode reports too many extent indices");
diff --git a/tsk/fs/fatfs_meta.c b/tsk/fs/fatfs_meta.c
index 2a3b28153280c1456b578b55aae8b523635cfbd1..6a21b3fe73f1150a9c211db68b11edca4ffb228e 100755
--- a/tsk/fs/fatfs_meta.c
+++ b/tsk/fs/fatfs_meta.c
@@ -759,6 +759,11 @@ fatfs_make_data_runs(TSK_FS_FILE * a_fs_file)
                 tsk_error_set_errstr
                     ("%s: Invalid sector address in FAT (too large): %"
                     PRIuDADDR " (plus %d sectors)", func_name, sbase, fatfs->csize);
+                tsk_fs_attr_run_free(data_run_head);
+                if (list_seen != NULL) {
+                    tsk_list_free(list_seen);
+                    list_seen = NULL;
+                }
                 return 1;
             }
 
@@ -770,6 +775,10 @@ fatfs_make_data_runs(TSK_FS_FILE * a_fs_file)
                 if (data_run_tmp == NULL) {
                     tsk_fs_attr_run_free(data_run_head);
                     fs_meta->attr_state = TSK_FS_META_ATTR_ERROR;
+                    if (list_seen != NULL) {
+                        tsk_list_free(list_seen);
+                        list_seen = NULL;
+                    }
                     return 1;
                 }
 
@@ -798,8 +807,10 @@ fatfs_make_data_runs(TSK_FS_FILE * a_fs_file)
                         "  cluster: %" PRIuDADDR, func_name, fs_meta->addr, clust);
                     fs_meta->attr_state = TSK_FS_META_ATTR_ERROR;
                     tsk_fs_attr_run_free(data_run_head);
-                    tsk_list_free(list_seen);
-                    list_seen = NULL;
+                    if (list_seen != NULL) {
+                        tsk_list_free(list_seen);
+                        list_seen = NULL;
+                    }
                     return 1;
                 }
                 clust = nxt;
@@ -809,13 +820,21 @@ fatfs_make_data_runs(TSK_FS_FILE * a_fs_file)
                     if (tsk_verbose)
                         tsk_fprintf(stderr,
                             "Loop found while processing file\n");
+                    tsk_fs_attr_run_free(data_run_head);
+                    if (list_seen != NULL) {
+                        tsk_list_free(list_seen);
+                        list_seen = NULL;
+                    }
                     break;
                 }
 
                 if (tsk_list_add(&list_seen, clust)) {
                     fs_meta->attr_state = TSK_FS_META_ATTR_ERROR;
-                    tsk_list_free(list_seen);
-                    list_seen = NULL;
+                    tsk_fs_attr_run_free(data_run_head);
+                    if (list_seen != NULL) {
+                        tsk_list_free(list_seen);
+                        list_seen = NULL;
+                    }
                     return 1;
                 }
             }
@@ -826,6 +845,10 @@ fatfs_make_data_runs(TSK_FS_FILE * a_fs_file)
                 tsk_fs_attrlist_getnew(fs_meta->attr,
                     TSK_FS_ATTR_NONRES)) == NULL) {
             fs_meta->attr_state = TSK_FS_META_ATTR_ERROR;
+            if (list_seen != NULL) {
+                tsk_list_free(list_seen);
+                list_seen = NULL;
+            }
             return 1;
         }
 
@@ -835,6 +858,11 @@ fatfs_make_data_runs(TSK_FS_FILE * a_fs_file)
                 fs_meta->size, fs_meta->size, roundup(fs_meta->size,
                     fatfs->csize * fs->block_size), 0, 0)) {
             fs_meta->attr_state = TSK_FS_META_ATTR_ERROR;
+            tsk_fs_attr_run_free(data_run_head);
+            if (list_seen != NULL) {
+                tsk_list_free(list_seen);
+                list_seen = NULL;
+            }
             return 1;
         }
 
diff --git a/tsk/fs/fs_dir.c b/tsk/fs/fs_dir.c
index ae8e36202ab224407dc7a63f225a37c9192b1999..b80c0012fbaa0895f28c27fe24c01ef5f76d2db5 100644
--- a/tsk/fs/fs_dir.c
+++ b/tsk/fs/fs_dir.c
@@ -790,6 +790,9 @@ tsk_fs_dir_walk_lcl(TSK_FS_INFO * a_fs, DENT_DINFO * a_dinfo,
                             PRIuINUM " exceeded max length / depth\n", fs_file->name->meta_addr);
                     }
 
+                    tsk_fs_dir_close(fs_dir);
+                    fs_file->name = NULL;
+                    tsk_fs_file_close(fs_file);
                     if (indexToOrderedIndex != NULL) {
                         free(indexToOrderedIndex);
                     }
@@ -801,7 +804,7 @@ tsk_fs_dir_walk_lcl(TSK_FS_INFO * a_fs, DENT_DINFO * a_dinfo,
                 strncpy(a_dinfo->didx[a_dinfo->depth],
                     fs_file->name->name,
                     DIR_STRSZ - strlen(a_dinfo->dirs));
-                strncat(a_dinfo->dirs, "/", DIR_STRSZ);
+                strncat(a_dinfo->dirs, "/", DIR_STRSZ-1);
                 depth_added = 1;
                 a_dinfo->depth++;
 
diff --git a/tsk/fs/fs_name.c b/tsk/fs/fs_name.c
index a687fcf8bb62fcd65777ecc83424cfdecf1f6edc..913e408eb3d679fa898d8ab93d6346a8a0fdd831 100755
--- a/tsk/fs/fs_name.c
+++ b/tsk/fs/fs_name.c
@@ -156,17 +156,22 @@ tsk_fs_name_copy(TSK_FS_NAME * a_fs_name_to,
 
     /* If the source has a full name,  copy it */
     if (a_fs_name_from->name) {
+        size_t name_len = strlen(a_fs_name_from->name);
+
         // make sure there is enough space
-        if (strlen(a_fs_name_from->name) >= a_fs_name_to->name_size) {
-            a_fs_name_to->name_size = strlen(a_fs_name_from->name) + 16;
-            a_fs_name_to->name =
+        if (name_len >= a_fs_name_to->name_size) {
+            char * to_name =
                 (char *) tsk_realloc(a_fs_name_to->name,
-                a_fs_name_to->name_size);
-            if (a_fs_name_to->name == NULL)
+                name_len + 16);
+            if (to_name == NULL)
                 return 1;
+
+            a_fs_name_to->name = to_name;
+            a_fs_name_to->name_size = name_len + 16;
         }
         strncpy(a_fs_name_to->name, a_fs_name_from->name,
-            a_fs_name_to->name_size);
+            name_len);
+        a_fs_name_to->name[name_len] = 0;
     }
     else {
         if (a_fs_name_to->name_size > 0)
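The rewritten copy assigns the realloc result to a temporary first: writing it straight back into a_fs_name_to->name would leak the original buffer (and leave name_size wrong) if the reallocation failed. It also null-terminates explicitly, since copying exactly the string length no longer guarantees a terminator. A compact sketch of the same pattern (generic names, plain realloc instead of tsk_realloc):

    #include <stdlib.h>
    #include <string.h>

    static int grow_and_copy(char **dst, size_t *dst_size, const char *src) {
        size_t need = strlen(src);
        if (need >= *dst_size) {
            char *tmp = (char *) realloc(*dst, need + 16);
            if (tmp == NULL)
                return 1;            /* *dst and *dst_size remain valid */
            *dst = tmp;
            *dst_size = need + 16;
        }
        memcpy(*dst, src, need);
        (*dst)[need] = '\0';         /* explicit termination, as in the patch */
        return 0;
    }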
diff --git a/tsk/fs/fs_open.c b/tsk/fs/fs_open.c
old mode 100755
new mode 100644
index 3a564f0dc5ef2eefd612d7cbfb7d144e6b838c50..c3e96e4e3a2a6a2f2af63310f834b01da4320b52
--- a/tsk/fs/fs_open.c
+++ b/tsk/fs/fs_open.c
@@ -25,6 +25,8 @@
  --*/
 
 #include "tsk_fs_i.h"
+#include "tsk/util/detect_encryption.h"
+#include "tsk/img/unsupported_types.h"
 
 /**
  * \file fs_open.c
@@ -194,7 +196,43 @@ tsk_fs_open_img_decrypt(TSK_IMG_INFO * a_img_info, TSK_OFF_T a_offset,
 
         if (fs_first == NULL) {
             tsk_error_reset();
-            tsk_error_set_errno(TSK_ERR_FS_UNKTYPE);
+
+            // If we're still at the start of the image and haven't identified any volume systems or file
+            // systems, check if the image type is a known unsupported type.
+            int unsupportedSignatureFound = 0;
+            if (a_offset == 0) {
+                char * imageType = detectUnsupportedImageType(a_img_info);
+                if (imageType != NULL) {
+                    unsupportedSignatureFound = 1;
+                    tsk_error_reset();
+                    tsk_error_set_errno(TSK_ERR_IMG_UNSUPTYPE);
+                    tsk_error_set_errstr("%s", imageType);
+                    free(imageType);
+                }
+            }
+
+            if (!unsupportedSignatureFound) {
+                // Check if the file system appears to be encrypted
+                encryption_detected_result* result = detectVolumeEncryption(a_img_info, a_offset);
+                if (result != NULL) {
+                    if (result->encryptionType == ENCRYPTION_DETECTED_SIGNATURE) {
+                        tsk_error_set_errno(TSK_ERR_FS_ENCRYPTED);
+                        tsk_error_set_errstr("%s", result->desc);
+                    }
+                    else if (result->encryptionType == ENCRYPTION_DETECTED_ENTROPY) {
+                        tsk_error_set_errno(TSK_ERR_FS_POSSIBLY_ENCRYPTED);
+                        tsk_error_set_errstr("%s", result->desc);
+                    }
+                    else {
+                        tsk_error_set_errno(TSK_ERR_FS_UNKTYPE);
+                    }
+                    free(result);
+                    result = NULL;
+                }
+                else {
+                    tsk_error_set_errno(TSK_ERR_FS_UNKTYPE);
+                }
+            }
         }
         return fs_first;
     }
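Callers that previously only saw TSK_ERR_FS_UNKTYPE after a failed open can now tell an encrypted or unsupported image apart from a genuinely unknown file system by inspecting the error code, the same classification TskIsImageSupported::handleError() performs. A short sketch (illustrative reporting function, not part of the patch):

    #include "tsk/libtsk.h"
    #include <stdio.h>

    static void report_open_failure(void) {
        uint32_t err = tsk_error_get_errno();
        if (err == TSK_ERR_FS_ENCRYPTED || err == TSK_ERR_VS_ENCRYPTED)
            fprintf(stderr, "Encrypted: %s\n", tsk_error_get());
        else if (err == TSK_ERR_FS_POSSIBLY_ENCRYPTED)
            fprintf(stderr, "Possibly encrypted: %s\n", tsk_error_get());
        else if (err == TSK_ERR_IMG_UNSUPTYPE)
            fprintf(stderr, "Unsupported image format: %s\n", tsk_error_get());
        else
            tsk_error_print(stderr);   /* unknown FS or other error */
    }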
diff --git a/tsk/fs/hfs.c b/tsk/fs/hfs.c
index 22618534ee3f0411b88407eae8217a863d0b8eec..233268fa6c7faaeaec4b4d11b5b910923415f5ca 100644
--- a/tsk/fs/hfs.c
+++ b/tsk/fs/hfs.c
@@ -469,11 +469,21 @@ hfs_ext_find_extent_record_attr(HFS_INFO * hfs, uint32_t cnid,
                 size_t rec_off;
                 hfs_btree_key_ext *key;
 
+                // Make sure node is large enough, note that (rec + 1) * 2 is an offset
+                // relative to the end of node
+                if ((rec + 1) * 2 > (int) nodesize) {
+                    tsk_error_set_errno(TSK_ERR_FS_GENFS);
+                    tsk_error_set_errstr
+                        ("hfs_ext_find_extent_record: offset of record %d in leaf node %d too small (%"
+                        PRIu16 ")", rec, cur_node, nodesize);
+                    free(node);
+                    return 1;
+                }
                 // get the record offset in the node
                 rec_off =
                     tsk_getu16(fs->endian,
                     &node[nodesize - (rec + 1) * 2]);
-                if (rec_off + sizeof(hfs_btree_key_ext) > nodesize) {
+                if (rec_off > nodesize - sizeof(hfs_btree_key_ext)) {
                     tsk_error_set_errno(TSK_ERR_FS_GENFS);
                     tsk_error_set_errstr
                         ("hfs_ext_find_extent_record_attr: offset of record %d in index node %d too large (%d vs %"
@@ -502,7 +512,7 @@ hfs_ext_find_extent_record_attr(HFS_INFO * hfs, uint32_t cnid,
                     int keylen =
                         2 + hfs_get_idxkeylen(hfs, tsk_getu16(fs->endian,
                             key->key_len), &(hfs->extents_header));
-                    if (rec_off + keylen > nodesize) {
+                    if ((nodesize < 4) || (keylen > nodesize - 4) || (rec_off >= nodesize - 4 - keylen)) {
                         tsk_error_set_errno(TSK_ERR_FS_GENFS);
                         tsk_error_set_errstr
                             ("hfs_ext_find_extent_record_attr: offset and keylenth of record %d in index node %d too large (%d vs %"
@@ -554,11 +564,22 @@ hfs_ext_find_extent_record_attr(HFS_INFO * hfs, uint32_t cnid,
                 int keylen;
                 TSK_FS_ATTR_RUN *attr_run;
 
+                // Make sure node is large enough, note that (rec + 1) * 2 is an offset
+                // relative to the end of node
+                if ((rec + 1) * 2 > (int) nodesize) {
+                    tsk_error_set_errno(TSK_ERR_FS_GENFS);
+                    tsk_error_set_errstr
+                        ("hfs_ext_find_extent_record_attr: offset of record %d in leaf node %d too small (%"
+                        PRIu16 ")", rec, cur_node, nodesize);
+                    free(node);
+                    return 1;
+                }
                 // get the record offset in the node
                 rec_off =
                     tsk_getu16(fs->endian,
                     &node[nodesize - (rec + 1) * 2]);
-                if (rec_off > nodesize) {
+
+                if (rec_off >= nodesize - sizeof(hfs_btree_key_ext)) {
                     tsk_error_set_errno(TSK_ERR_FS_GENFS);
                     tsk_error_set_errstr
                         ("hfs_ext_find_extent_record_attr: offset of record %d in leaf node %d too large (%d vs %"
@@ -687,11 +708,17 @@ hfs_ext_find_extent_record_attr(HFS_INFO * hfs, uint32_t cnid,
  */
 int
 hfs_cat_compare_keys(HFS_INFO * hfs, const hfs_btree_key_cat * key1,
-    const hfs_btree_key_cat * key2)
+    int keylen1, const hfs_btree_key_cat * key2)
 {
     TSK_FS_INFO *fs = (TSK_FS_INFO *) & (hfs->fs_info);
     uint32_t cnid1, cnid2;
 
+    if (keylen1 < 6) {
+        // Note that it would be better to return an error value here
+        // but the current function interface does not support this
+        // Also see issue #2365
+        return -1;
+    }
     cnid1 = tsk_getu32(fs->endian, key1->parent_cnid);
     cnid2 = tsk_getu32(fs->endian, key2->parent_cnid);
 
@@ -700,7 +727,7 @@ hfs_cat_compare_keys(HFS_INFO * hfs, const hfs_btree_key_cat * key1,
     if (cnid1 > cnid2)
         return 1;
 
-    return hfs_unicode_compare(hfs, &key1->name, &key2->name);
+    return hfs_unicode_compare(hfs, &key1->name, keylen1 - 6, &key2->name);
 }
 
 
@@ -821,11 +848,23 @@ hfs_cat_traverse(HFS_INFO * hfs,
                 uint8_t retval;
                 int keylen;
 
+                // Make sure node is large enough, note that (rec + 1) * 2 is an offset
+                // relative to the end of node
+                if ((rec + 1) * 2 > (int) nodesize) {
+                    tsk_error_set_errno(TSK_ERR_FS_GENFS);
+                    tsk_error_set_errstr
+                        ("hfs_cat_traverse: offset of record %d in leaf node %d too small (%"
+                        PRIu16 ")", rec, cur_node, nodesize);
+                    free(node);
+                    return 1;
+                }
                 // get the record offset in the node
                 rec_off =
                     tsk_getu16(fs->endian,
                     &node[nodesize - (rec + 1) * 2]);
-                if (rec_off > nodesize) {
+
+                // Need at least 2 bytes for key_len
+                if (rec_off >= nodesize - 2) {
                     tsk_error_set_errno(TSK_ERR_FS_GENFS);
                     tsk_error_set_errstr
                         ("hfs_cat_traverse: offset of record %d in index node %d too large (%d vs %"
@@ -838,10 +877,11 @@ hfs_cat_traverse(HFS_INFO * hfs,
                 key = (hfs_btree_key_cat *) & node[rec_off];
                 keylen = 2 + tsk_getu16(hfs->fs_info.endian, key->key_len);
 
-                if (keylen > nodesize - rec_off) {
+                // Want a key of at least 6 bytes, the size of the first 2 members of hfs_btree_key_cat
+                if ((keylen < 6) || (keylen > nodesize - rec_off)) {
                     tsk_error_set_errno(TSK_ERR_FS_GENFS);
                     tsk_error_set_errstr
-                        ("hfs_cat_traverse: length of key %d in index node %d too large (%d vs %"
+                        ("hfs_cat_traverse: length of key %d in index node %d out of bounds (6 < %d < %"
                         PRIu16 ")", rec, cur_node, keylen, (nodesize - rec_off));
                     free(node);
                     return 1;
@@ -859,7 +899,7 @@ hfs_cat_traverse(HFS_INFO * hfs,
 
                 /* save the info from this record unless it is too big */
                 retval =
-                    a_cb(hfs, HFS_BT_NODE_TYPE_IDX, key,
+                    a_cb(hfs, HFS_BT_NODE_TYPE_IDX, key, keylen, nodesize,
                     cur_off + rec_off, ptr);
                 if (retval == HFS_BTREE_CB_ERR) {
                     tsk_error_set_errno(TSK_ERR_FS_GENFS);
@@ -931,11 +971,23 @@ hfs_cat_traverse(HFS_INFO * hfs,
                 uint8_t retval;
                 int keylen;
 
+                // Make sure node is large enough, note that (rec + 1) * 2 is an offset
+                // relative to the end of node
+                if ((rec + 1) * 2 > (int) nodesize) {
+                    tsk_error_set_errno(TSK_ERR_FS_GENFS);
+                    tsk_error_set_errstr
+                        ("hfs_cat_traverse: offset of record %d in leaf node %d too small (%"
+                        PRIu16 ")", rec, cur_node, nodesize);
+                    free(node);
+                    return 1;
+                }
                 // get the record offset in the node
                 rec_off =
                     tsk_getu16(fs->endian,
                     &node[nodesize - (rec + 1) * 2]);
-                if (rec_off > nodesize) {
+
+                // Need at least 2 bytes for key_len
+                if (rec_off >= nodesize - 2) {
                     tsk_error_set_errno(TSK_ERR_FS_GENFS);
                     tsk_error_set_errstr
                         ("hfs_cat_traverse: offset of record %d in leaf node %d too large (%d vs %"
@@ -948,10 +1000,11 @@ hfs_cat_traverse(HFS_INFO * hfs,
                 key = (hfs_btree_key_cat *) & node[rec_off];
                 keylen = 2 + tsk_getu16(hfs->fs_info.endian, key->key_len);
 
-                if (keylen > nodesize - rec_off) {
+                // Want a key of at least 6 bytes, the size of the first 2 members of hfs_btree_key_cat
+                if ((keylen < 6) || (keylen > nodesize - rec_off)) {
                     tsk_error_set_errno(TSK_ERR_FS_GENFS);
                     tsk_error_set_errstr
-                        ("hfs_cat_traverse: length of key %d in leaf node %d too large (%d vs %"
+                        ("hfs_cat_traverse: length of key %d in leaf node %d out of bounds (6 < %d < %"
                         PRIu16 ")", rec, cur_node, keylen, nodesize);
                     free(node);
                     return 1;
@@ -968,7 +1021,7 @@ hfs_cat_traverse(HFS_INFO * hfs,
                 //                rec_cnid = tsk_getu32(fs->endian, key->file_id);
 
                 retval =
-                    a_cb(hfs, HFS_BT_NODE_TYPE_LEAF, key,
+                    a_cb(hfs, HFS_BT_NODE_TYPE_LEAF, key, keylen, nodesize,
                     cur_off + rec_off, ptr);
                 if (retval == HFS_BTREE_CB_LEAF_STOP) {
                     is_done = 1;
@@ -1014,7 +1067,7 @@ typedef struct {
 
 static uint8_t
 hfs_cat_get_record_offset_cb(HFS_INFO * hfs, int8_t level_type,
-    const hfs_btree_key_cat * cur_key,
+    const hfs_btree_key_cat * cur_key, int cur_keylen, size_t node_size,
     TSK_OFF_T key_off, void *ptr)
 {
     HFS_CAT_GET_RECORD_OFFSET_DATA *offset_data = (HFS_CAT_GET_RECORD_OFFSET_DATA *)ptr;
@@ -1029,14 +1082,14 @@ hfs_cat_get_record_offset_cb(HFS_INFO * hfs, int8_t level_type,
             tsk_getu32(hfs->fs_info.endian, cur_key->parent_cnid));
 
     if (level_type == HFS_BT_NODE_TYPE_IDX) {
-        int diff = hfs_cat_compare_keys(hfs, cur_key, targ_key);
+        int diff = hfs_cat_compare_keys(hfs, cur_key, cur_keylen, targ_key);
         if (diff < 0)
             return HFS_BTREE_CB_IDX_LT;
         else
             return HFS_BTREE_CB_IDX_EQGT;
     }
     else {
-        int diff = hfs_cat_compare_keys(hfs, cur_key, targ_key);
+        int diff = hfs_cat_compare_keys(hfs, cur_key, cur_keylen, targ_key);
 
         // see if this record is for our file or if we passed the interesting entries
         if (diff < 0) {
@@ -1609,9 +1662,15 @@ hfs_cat_file_lookup(HFS_INFO * hfs, TSK_INUM_T inum, HFS_ENTRY * entry,
 
 static uint8_t
 hfs_find_highest_inum_cb(HFS_INFO * hfs, int8_t level_type,
-    const hfs_btree_key_cat * cur_key,
+    const hfs_btree_key_cat * cur_key, int cur_keylen, size_t node_size,
     TSK_OFF_T key_off, void *ptr)
 {
+    if (cur_keylen < 6) {
+        // Note that it would be better to return an error value here
+        // but the current function interface does not support this
+        // Also see issue #2365
+        return -1;
+    }
     // NOTE: This assumes that the biggest inum is the last one that we
     // see.  the traverse method does not currently promise that as part of
     // its callback "contract".
@@ -3854,6 +3913,13 @@ hfs_load_extended_attrs(TSK_FS_FILE * fs_file,
         return 0;
     }
 
+    if (attrFile.nodeSize < sizeof(hfs_btree_node)) {
+        error_returned
+            ("hfs_load_extended_attrs: node size too small");
+        close_attr_file(&attrFile);
+        return 1;
+    }
+
     // A place to hold one node worth of data
     nodeData = (uint8_t *) malloc(attrFile.nodeSize);
     if (nodeData == NULL) {
@@ -3947,13 +4013,19 @@ hfs_load_extended_attrs(TSK_FS_FILE * fs_file,
             uint8_t *recData;   // pointer to the data part of the record
             uint32_t keyFileID;
 
+            if ((attrFile.nodeSize < 2) || (recIndx > ((attrFile.nodeSize - 2) / 2))) {
+                error_detected(TSK_ERR_FS_READ,
+                    "hfs_load_extended_attrs: Unable to process attribute (recIndx exceeds attrFile.nodeSize)");
+                goto on_error;
+            }
+
             // The offset to the record is stored in table at end of node
             uint8_t *recOffsetTblEntry = &nodeData[attrFile.nodeSize - (2 * (recIndx + 1))];  // data describing where this record is
             uint16_t recOffset = tsk_getu16(endian, recOffsetTblEntry);
             //uint8_t * nextRecOffsetData = &nodeData[attrFile.nodeSize - 2* (recIndx+2)];
 
             // make sure the record and first fields are in the buffer
-            if (recOffset + 14 > attrFile.nodeSize) {
+            if ((attrFile.nodeSize < 14) || (recOffset >= attrFile.nodeSize - 14)) {
                 error_detected(TSK_ERR_FS_READ,
                     "hfs_load_extended_attrs: Unable to process attribute (offset too big)");
                 goto on_error;
@@ -4010,7 +4082,7 @@ hfs_load_extended_attrs(TSK_FS_FILE * fs_file,
             // make sure the fields we care about are still in the buffer
             // +2 is because key_len doesn't include its own length
             // +4 is because of the amount of data we read from the data
-            if (recOffset + keyLength + 2 + 4 > attrFile.nodeSize) {
+            if ((keyLength > attrFile.nodeSize - 2 - 4) || (recOffset >= attrFile.nodeSize - 2 - 4 - keyLength)) {
                 error_detected(TSK_ERR_FS_READ,
                     "hfs_load_extended_attrs: Unable to process attribute");
                 goto on_error;
@@ -4058,6 +4130,11 @@ hfs_load_extended_attrs(TSK_FS_FILE * fs_file,
         // Loop over the records in this node
         for (recIndx = 0; recIndx < numRec; ++recIndx) {
 
+            if ((attrFile.nodeSize < 2) || (recIndx > ((attrFile.nodeSize - 2) / 2))) {
+                error_detected(TSK_ERR_FS_READ,
+                    "hfs_load_extended_attrs: Unable to process attribute (recIndx exceeds attrFile.nodeSize)");
+                goto on_error;
+            }
             // The offset to the record is stored in table at end of node
             uint8_t *recOffsetTblEntry = &nodeData[attrFile.nodeSize - (2 * (recIndx + 1))];  // data describing where this record is
             uint16_t recOffset = tsk_getu16(endian, recOffsetTblEntry);
@@ -4067,7 +4144,7 @@ hfs_load_extended_attrs(TSK_FS_FILE * fs_file,
             uint32_t keyFileID;
 
             // make sure the record and first fields are in the buffer
-            if (recOffset + 14 > attrFile.nodeSize) {
+            if (recOffset >= attrFile.nodeSize - 14) {
                 error_detected(TSK_ERR_FS_READ,
                     "hfs_load_extended_attrs: Unable to process attribute (offset too big)");
                 goto on_error;
@@ -4120,7 +4197,7 @@ hfs_load_extended_attrs(TSK_FS_FILE * fs_file,
                 // make sure the fields we care about are still in the buffer
                 // +2 because key_len doesn't include its own length
                 // +16 for the amount of data we'll read from data
-                if (recOffset + keyLength + 2 + 16 > attrFile.nodeSize) {
+                if ((attrFile.nodeSize < 2 + 16) || (keyLength > attrFile.nodeSize - 2 - 16) || (recOffset >= attrFile.nodeSize - 2 - 16 - keyLength)) {
                     error_detected(TSK_ERR_FS_READ,
                         "hfs_load_extended_attrs: Unable to process attribute");
                     goto on_error;
@@ -4150,7 +4227,7 @@ hfs_load_extended_attrs(TSK_FS_FILE * fs_file,
 
                 // Check the attribute fits in the node
                 //if (recordType != HFS_ATTR_RECORD_INLINE_DATA) {
-                if (recOffset + keyLength + 2 + attributeLength > attrFile.nodeSize) {
+                if ((attributeLength > attrFile.nodeSize - 2 - 16 - keyLength) || (recOffset >= attrFile.nodeSize - 2 - 16 - keyLength - attributeLength)) {
                     error_detected(TSK_ERR_FS_READ,
                         "hfs_load_extended_attrs: Unable to process attribute");
                     goto on_error;
diff --git a/tsk/fs/hfs_dent.c b/tsk/fs/hfs_dent.c
index e4cebf8a434e80d9aa8e98e05ec81f8c1517b3f6..54460f14bfe078ca31ef3edf85d849ecb79af46d 100644
--- a/tsk/fs/hfs_dent.c
+++ b/tsk/fs/hfs_dent.c
@@ -198,7 +198,7 @@ typedef struct {
 
 static uint8_t
 hfs_dir_open_meta_cb(HFS_INFO * hfs, int8_t level_type,
-    const hfs_btree_key_cat * cur_key,
+    const hfs_btree_key_cat * cur_key, int cur_keylen, size_t nodesize,
     TSK_OFF_T key_off, void *ptr)
 {
     HFS_DIR_OPEN_META_INFO *info = (HFS_DIR_OPEN_META_INFO *) ptr;
@@ -233,7 +233,19 @@ hfs_dir_open_meta_cb(HFS_INFO * hfs, int8_t level_type,
                 cur_key->parent_cnid) > info->cnid) {
             return HFS_BTREE_CB_LEAF_STOP;
         }
+        // Need at least 2 bytes for key_len
+        if (cur_keylen < 2) {
+            tsk_error_set_errno(TSK_ERR_FS_GENFS);
+            tsk_error_set_errstr("hfs_dir_open_meta: cur_keylen value out of bounds");
+            return HFS_BTREE_CB_ERR;
+        }
         rec_off2 = 2 + tsk_getu16(hfs->fs_info.endian, cur_key->key_len);
+
+        if ((nodesize < 2) || (rec_off2 >= nodesize - 2)) {
+            tsk_error_set_errno(TSK_ERR_FS_GENFS);
+            tsk_error_set_errstr("hfs_dir_open_meta: nodesize value out of bounds");
+            return HFS_BTREE_CB_ERR;
+        }
         rec_type = tsk_getu16(hfs->fs_info.endian, &rec_buf[rec_off2]);
 
         // Catalog entry is for a file
@@ -283,6 +295,11 @@ hfs_dir_open_meta_cb(HFS_INFO * hfs, int8_t level_type,
 
         /* This is a normal file in the folder */
         else if (rec_type == HFS_FILE_RECORD) {
+            if ((nodesize < sizeof(hfs_file)) || (rec_off2 >= nodesize - sizeof(hfs_file))) {
+                tsk_error_set_errno(TSK_ERR_FS_GENFS);
+                tsk_error_set_errstr("hfs_dir_open_meta: nodesize value out of bounds");
+                return HFS_BTREE_CB_ERR;
+            }
             hfs_file *file = (hfs_file *) & rec_buf[rec_off2];
             // This could be a hard link.  We need to test this CNID, and follow it if necessary.
             unsigned char is_err;
diff --git a/tsk/fs/hfs_unicompare.c b/tsk/fs/hfs_unicompare.c
index 752486af072f58dc8f49bed1c11c77e3d2350e83..91d528b88338bf23efe9e6118218eef87156b5ed 100644
--- a/tsk/fs/hfs_unicompare.c
+++ b/tsk/fs/hfs_unicompare.c
@@ -109,7 +109,7 @@
 #include "tsk_hfs.h"
 
 static int hfs_unicode_compare_int(uint16_t endian,
-    const hfs_uni_str * uni1, const hfs_uni_str * uni2);
+    const hfs_uni_str * uni1, int uni1_len, const hfs_uni_str * uni2);
 
 
 /**
@@ -124,18 +124,31 @@ static int hfs_unicode_compare_int(uint16_t endian,
  */
 int
 hfs_unicode_compare(HFS_INFO * hfs, const hfs_uni_str * uni1,
-    const hfs_uni_str * uni2)
+    int uni1_len, const hfs_uni_str * uni2)
 {
     if (hfs->is_case_sensitive) {
         uint16_t l1, l2;
         const uint8_t *s1, *s2;
         uint16_t c1, c2;
 
+        if (uni1_len < 2) {
+            // Note that it would be better to return an error value here
+            // but the current function interface does not support this
+            // Also see issue #2365
+            return -1;
+        }
         l1 = tsk_getu16(hfs->fs_info.endian, uni1->length);
         l2 = tsk_getu16(hfs->fs_info.endian, uni2->length);
         s1 = uni1->unicode;
         s2 = uni2->unicode;
 
+        // Note that l1 is a count of UTF-16 "characters" while uni1_len is a count of bytes.
+        if (l1 > (uni1_len - 2) / 2) {
+            // Note that it would be better to return an error value here
+            // but the current function interface does not support this
+            // Also see issue #2365
+            return -1;
+        }
         while (1) {
             if ((l1 == 0) && (l2 == 0))
                 return 0;
@@ -157,7 +170,7 @@ hfs_unicode_compare(HFS_INFO * hfs, const hfs_uni_str * uni1,
         return 0;
     }
     else
-        return hfs_unicode_compare_int(hfs->fs_info.endian, uni1, uni2);
+        return hfs_unicode_compare_int(hfs->fs_info.endian, uni1, uni1_len, uni2);
 }
 
 extern uint16_t gLowerCaseTable[];
@@ -169,17 +182,34 @@ extern uint16_t gLowerCaseTable[];
  */
 static int
 hfs_unicode_compare_int(uint16_t endian, const hfs_uni_str * uni1,
-    const hfs_uni_str * uni2)
+    int uni1_len, const hfs_uni_str * uni2)
 {
     uint16_t c1, c2;
     uint16_t temp;
     uint16_t *lowerCaseTable;
-
-    const uint8_t *str1 = uni1->unicode;
-    const uint8_t *str2 = uni2->unicode;
-    uint16_t length1 = tsk_getu16(endian, uni1->length);
-    uint16_t length2 = tsk_getu16(endian, uni2->length);
-
+    const uint8_t *str1 = NULL;
+    const uint8_t *str2 = NULL;
+    uint16_t length1 = 0;
+    uint16_t length2 = 0;
+
+    if (uni1_len < 2) {
+        // Note that it would be better to return an error value here
+        // but the current function interface does not support this
+        // Also see issue #2365
+        return -1;
+    }
+    str1 = uni1->unicode;
+    str2 = uni2->unicode;
+    length1 = tsk_getu16(endian, uni1->length);
+    length2 = tsk_getu16(endian, uni2->length);
+
+    // Note that length1 is a count of UTF-16 "characters" while uni1_len is a count of bytes.
+    if (length1 > (uni1_len - 2) / 2) {
+        // Note that it would be better to return an error value here
+        // but the current function interface does not support this
+        // Also see issue #2365
+        return -1;
+    }
     lowerCaseTable = gLowerCaseTable;
 
     while (1) {
diff --git a/tsk/fs/iso9660.c b/tsk/fs/iso9660.c
index 779d1cf5c56e06d90cc16a5bf7c0b9740cf458fd..3815517638aeea612adc6003858f6b55ba1590e7 100755
--- a/tsk/fs/iso9660.c
+++ b/tsk/fs/iso9660.c
@@ -90,10 +90,11 @@ iso9660_inode_list_free(TSK_FS_INFO * fs)
  * @param buf Buffer of data to process
  * @param count Length of buffer in bytes.
  * @param hFile File handle to print details to  (or NULL for no printing)
+ * @param recursion_depth Recursion depth to limit the number of self-calls
  * @returns NULL on error
  */
 static rockridge_ext *
-parse_susp(TSK_FS_INFO * fs, char *buf, int count, FILE * hFile)
+parse_susp(TSK_FS_INFO * fs, char *buf, int count, FILE * hFile, int recursion_depth)
 {
     rockridge_ext *rr;
     ISO_INFO *iso = (ISO_INFO *) fs;
@@ -103,6 +104,11 @@ parse_susp(TSK_FS_INFO * fs, char *buf, int count, FILE * hFile)
     if (tsk_verbose)
         tsk_fprintf(stderr, "parse_susp: count is: %d\n", count);
 
+    // 32 is an arbitrarily chosen recursion limit.
+    if (recursion_depth > 32) {
+        return NULL;
+    }
+
     // allocate the output data structure
     rr = (rockridge_ext *) tsk_malloc(sizeof(rockridge_ext));
     if (rr == NULL) {
@@ -156,8 +162,17 @@ parse_susp(TSK_FS_INFO * fs, char *buf, int count, FILE * hFile)
                     ssize_t cnt =
                         tsk_fs_read(fs, off, buf2,
                         tsk_getu32(fs->endian, ce->celen_m));
+
                     if (cnt == tsk_getu32(fs->endian, ce->celen_m)) {
-                        parse_susp(fs, buf2, (int) cnt, hFile);
+                        rockridge_ext *rr_sub_entry = parse_susp(fs, buf2, (int) cnt, hFile, recursion_depth + 1);
+
+                        // Prevent an infinite loop
+                        if (rr_sub_entry == NULL) {
+                            free(buf2);
+                            free(rr);
+                            return NULL;
+                        }
+                        free(rr_sub_entry);
                     }
                     else if (tsk_verbose) {
                         fprintf(stderr,
@@ -298,7 +313,7 @@ parse_susp(TSK_FS_INFO * fs, char *buf, int count, FILE * hFile)
 
             rr_nm = (iso9660_rr_nm_entry *) buf;
 
-            if ((uintptr_t)&rr_nm->name[0] + (int) rr_nm->len - 5 - 1> (uintptr_t)end) {
+            if ((rr_nm->len < 6) || ((uintptr_t)&rr_nm->name[0] + (int) rr_nm->len - 5 - 1> (uintptr_t)end)) {
                 if (tsk_verbose) 
                     tsk_fprintf(stderr, "parse_susp: not enough room for RR alternative name\n");
                 break;
@@ -537,6 +552,8 @@ iso9660_load_inodes_dir(TSK_FS_INFO * fs, TSK_OFF_T a_offs, int count,
                         if (tsk_verbose)
                             tsk_fprintf(stderr,
                                         "iso9660_load_inodes_dir: UTF-16 name length is too large, bailing\n");
+                        free(in_node);
+                        in_node = NULL;
                         break;
                     }
 
@@ -582,6 +599,8 @@ iso9660_load_inodes_dir(TSK_FS_INFO * fs, TSK_OFF_T a_offs, int count,
                         if (tsk_verbose)
                             tsk_fprintf(stderr,
                                         "iso9660_load_inodes_dir: ASCII name length is too large, bailing\n");
+                        free(in_node);
+                        in_node = NULL;
                         break;
                     }
 
@@ -595,6 +614,8 @@ iso9660_load_inodes_dir(TSK_FS_INFO * fs, TSK_OFF_T a_offs, int count,
                     tsk_error_set_errno(TSK_ERR_FS_ARG);
                     tsk_error_set_errstr
                         ("Invalid ctype in iso9660_load_inodes_dir");
+                    free(in_node);
+                    in_node = NULL;
                     return -1;
                 }
 
@@ -618,6 +639,7 @@ iso9660_load_inodes_dir(TSK_FS_INFO * fs, TSK_OFF_T a_offs, int count,
                         tsk_fprintf(stderr,
                                     "iso9660_load_inodes_dir: length of name after processing is 0. bailing\n");
                     free(in_node);
+                    in_node = NULL;
                     break;
                     
                 }
@@ -637,6 +659,7 @@ iso9660_load_inodes_dir(TSK_FS_INFO * fs, TSK_OFF_T a_offs, int count,
                                 "iso9660_load_inodes_dir: file starts past end of image (%"PRIu32"). bailing\n",
                                 tsk_getu32(fs->endian, dentry->ext_loc_m));
                 free(in_node);
+                in_node = NULL;
                 break;
             }
             in_node->offset =
@@ -648,6 +671,7 @@ iso9660_load_inodes_dir(TSK_FS_INFO * fs, TSK_OFF_T a_offs, int count,
                                 "iso9660_load_inodes_dir: file ends past end of image (%"PRIu32" bytes). bailing\n",
                                 tsk_getu32(fs->endian, in_node->inode.dr.data_len_m) + in_node->offset);
                 free(in_node);
+                in_node = NULL;
                 break;
             }
             /* record size to make sure fifos show up as unique files */
@@ -675,12 +699,13 @@ iso9660_load_inodes_dir(TSK_FS_INFO * fs, TSK_OFF_T a_offs, int count,
                 in_node->inode.rr =
                     parse_susp(fs,
                     &buf[b_offs + sizeof(iso9660_dentry) + dentry->fi_len],
-                    extra_bytes, NULL);
+                    extra_bytes, NULL, 0);
                 if (in_node->inode.rr == NULL) {
                     if (tsk_verbose)
                         tsk_fprintf(stderr,
                                     "iso9660_load_inodes_dir: parse_susp returned error (%s). bailing\n", tsk_error_get());
                     free(in_node);
+                    in_node = NULL;
                     break;
                 }
                 
@@ -784,7 +809,9 @@ iso9660_load_inodes_pt_joliet(TSK_FS_INFO * fs, iso9660_svd * svd,
     pt_len = tsk_getu32(fs->endian, svd->pt_size_m);
 
     while (pt_len > 0) {
-        char utf16_buf[ISO9660_MAXNAMLEN_JOL + 1];      // UTF-16 name from img
+        // Since cnt + 1 is used further on and cnt can be up to ISO9660_MAXNAMLEN_JOL,
+        // adding 2 ensures utf16_buf is sufficiently large.
+        char utf16_buf[ISO9660_MAXNAMLEN_JOL + 2];      // UTF-16 name from img
         char utf8buf[2 * ISO9660_MAXNAMLEN_JOL + 1];    // UTF-8 version of name
         int readlen;
         TSK_OFF_T extent;       /* offset of extent for current directory */
@@ -812,7 +839,7 @@ iso9660_load_inodes_pt_joliet(TSK_FS_INFO * fs, iso9660_svd * svd,
         if (dir.len_di > ISO9660_MAXNAMLEN_JOL)
             readlen = ISO9660_MAXNAMLEN_JOL;
 
-        memset(utf16_buf, 0, ISO9660_MAXNAMLEN_JOL);
+        memset(utf16_buf, 0, ISO9660_MAXNAMLEN_JOL + 2);
         /* get UCS-2 filename for the entry */
         cnt = tsk_fs_read(fs, pt_offs, (char *) utf16_buf, readlen);
         if (cnt != dir.len_di) {
@@ -2123,10 +2150,12 @@ iso9660_istat(TSK_FS_INFO * fs, TSK_FS_ISTAT_FLAG_ENUM istat_flags, FILE * hFile
             cnt =
                 tsk_fs_read(fs, dinode->susp_off, buf2,
                 (size_t) dinode->susp_len);
+
+            rockridge_ext *rr_entry = NULL;
             if (cnt == dinode->susp_len) {
-                parse_susp(fs, buf2, (int) cnt, hFile);
+                rr_entry = parse_susp(fs, buf2, (int) cnt, hFile, 0);
             }
-            else {
+            if (rr_entry == NULL) {
                 fprintf(hFile, "Error reading Rock Ridge Location\n");
                 if (tsk_verbose) {
                     fprintf(stderr,
diff --git a/tsk/fs/iso9660_dent.c b/tsk/fs/iso9660_dent.c
index 4dc4d61bf02e0db79436b374b190567eab480626..34de246a422e63863ad0a0118c70e7981149740b 100644
--- a/tsk/fs/iso9660_dent.c
+++ b/tsk/fs/iso9660_dent.c
@@ -87,7 +87,7 @@
  * @returns TSK_ERR on error and TSK_OK otherwise
  */
 static uint8_t
-iso9660_proc_dir(TSK_FS_INFO * a_fs, TSK_FS_DIR * a_fs_dir, char *buf,
+iso9660_proc_dir(TSK_FS_INFO * a_fs, TSK_FS_DIR * a_fs_dir, const char *buf,
     size_t a_length, TSK_INUM_T a_addr, TSK_OFF_T a_dir_addr)
 {
     ISO_INFO *iso = (ISO_INFO *) a_fs;
@@ -120,7 +120,6 @@ iso9660_proc_dir(TSK_FS_INFO * a_fs, TSK_FS_DIR * a_fs_dir, char *buf,
 
     buf_idx += dd->entry_len;
     if (buf_idx > a_length - sizeof(iso9660_dentry)) {
-        free(buf);
         tsk_fs_name_free(fs_name);
         return TSK_OK;
     }
@@ -201,7 +200,6 @@ iso9660_proc_dir(TSK_FS_INFO * a_fs, TSK_FS_DIR * a_fs_dir, char *buf,
         }
     }
 
-    free(buf);
     tsk_fs_name_free(fs_name);
 
     return TSK_OK;
@@ -291,6 +289,7 @@ iso9660_dir_open_meta(TSK_FS_INFO * a_fs, TSK_FS_DIR ** a_fs_dir,
             tsk_error_set_errno(TSK_ERR_FS_READ);
         }
         tsk_error_set_errstr2("iso9660_dir_open_meta");
+        free(buf);
         return TSK_ERR;
     }
 
@@ -301,21 +300,26 @@ iso9660_dir_open_meta(TSK_FS_INFO * a_fs, TSK_FS_DIR ** a_fs_dir,
     // if we are listing the root directory, add the Orphan directory entry
     if (a_addr == a_fs->root_inum) {
         TSK_FS_NAME *fs_name = tsk_fs_name_alloc(256, 0);
-        if (fs_name == NULL)
+        if (fs_name == NULL) {
+            free(buf);
             return TSK_ERR;
+        }
 
         if (tsk_fs_dir_make_orphan_dir_name(a_fs, fs_name)) {
             tsk_fs_name_free(fs_name);
+            free(buf);
             return TSK_ERR;
         }
 
         if (tsk_fs_dir_add(fs_dir, fs_name)) {
             tsk_fs_name_free(fs_name);
+            free(buf);
             return TSK_ERR;
         }
         tsk_fs_name_free(fs_name);
     }
 
+    free(buf);
     return retval;
 }
 
diff --git a/tsk/fs/lzvn.c b/tsk/fs/lzvn.c
index f03afefa268346d4b02611191fe79a2d2d9d40c7..b264d0f4fd502030c3f419f89b2ca659b9c965d6 100644
--- a/tsk/fs/lzvn.c
+++ b/tsk/fs/lzvn.c
@@ -17,6 +17,9 @@ COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+NOTE: This license text is also distributed in licenses/bsd.txt
+
 */
 
 // LZVN low-level decoder
diff --git a/tsk/fs/ntfs.c b/tsk/fs/ntfs.c
index 631fe2ff38cb83cc62ced5f6eb491c2314fcc2ac..1761856acb6dd37498da081a945f67e0ecfd7583 100755
--- a/tsk/fs/ntfs.c
+++ b/tsk/fs/ntfs.c
@@ -375,9 +375,20 @@ ntfs_dinode_lookup(NTFS_INFO * a_ntfs, char *a_buf, TSK_INUM_T a_mftnum)
             ("dinode_lookup: More Update Sequence Entries than MFT size");
         return TSK_COR;
     }
-    if (tsk_getu16(fs->endian, mft->upd_off) + 
-            sizeof(ntfs_upd) + 
-            2*(tsk_getu16(fs->endian, mft->upd_cnt) - 1) > a_ntfs->mft_rsize_b) {
+    uint16_t upd_cnt = tsk_getu16(fs->endian, mft->upd_cnt);
+    uint16_t upd_off = tsk_getu16(fs->endian, mft->upd_off);
+
+    // Make sure upd_cnt > 0 to prevent an integer wrap around.
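+    // Each update sequence entry is 2 bytes, so the count cannot meaningfully exceed half the MFT record size.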
+    if ((upd_cnt == 0) || (upd_cnt > (((a_ntfs->mft_rsize_b) / 2) + 1))) {
+        tsk_error_reset();
+        tsk_error_set_errno(TSK_ERR_FS_INODE_COR);
+        tsk_error_set_errstr
+            ("dinode_lookup: Invalid update count value out of bounds");
+        return TSK_COR;
+    }
+    size_t mft_rsize_b = ((size_t) upd_cnt - 1) * 2;
+
+    if ((size_t) upd_off + sizeof(ntfs_upd) > (a_ntfs->mft_rsize_b - mft_rsize_b)) {
         tsk_error_reset();
         tsk_error_set_errno(TSK_ERR_FS_INODE_COR);
         tsk_error_set_errstr
@@ -386,9 +397,8 @@ ntfs_dinode_lookup(NTFS_INFO * a_ntfs, char *a_buf, TSK_INUM_T a_mftnum)
     }
 
     /* Apply the update sequence structure template */
-    upd =
-        (ntfs_upd *) ((uintptr_t) a_buf + tsk_getu16(fs->endian,
-            mft->upd_off));
+
+    upd = (ntfs_upd *) ((uintptr_t) a_buf + upd_off);
     /* Get the sequence value that each 16-bit value should be */
     sig_seq = tsk_getu16(fs->endian, upd->upd_val);
     /* cycle through each sector */
@@ -594,7 +604,8 @@ ntfs_make_data_run(NTFS_INFO * ntfs, TSK_OFF_T start_vcn,
         int64_t addr_offset = 0;
 
         /* allocate a new tsk_fs_attr_run */
-        if ((data_run = tsk_fs_attr_run_alloc()) == NULL) {
+        data_run = tsk_fs_attr_run_alloc();
+        if (data_run == NULL) {
             tsk_fs_attr_run_free(*a_data_run_head);
             *a_data_run_head = NULL;
             return TSK_ERR;
@@ -1426,7 +1437,11 @@ ntfs_attr_walk_special(const TSK_FS_ATTR * fs_attr,
                             myflags |= TSK_FS_BLOCK_FLAG_UNALLOC;
                         }
 
-                        if (fs_attr->size - off > fs->block_size)
+                        // It is unclear what the behavior should be here;
+                        // POSIX-like behavior (a zero-length read past the end) is assumed.
+                        if (off >= fs_attr->size)
+                            read_len = 0;
+                        else if (fs_attr->size - off > fs->block_size)
                             read_len = fs->block_size;
                         else
                             read_len = (size_t) (fs_attr->size - off);
@@ -2049,8 +2064,10 @@ ntfs_proc_attrseq(NTFS_INFO * ntfs,
                 tsk_error_set_errno(TSK_ERR_FS_CORRUPT);
                 tsk_error_set_errstr("ntfs_proc_attrseq: Compression unit size 2^%d too large",
                     tsk_getu16(fs->endian, attr->c.nr.compusize));
-                if (fs_attr_run)
+                if (fs_attr_run) {
                     tsk_fs_attr_run_free(fs_attr_run);
+                    fs_attr_run = NULL;
+                }
                 return TSK_COR;
             }
 
@@ -2090,9 +2107,10 @@ ntfs_proc_attrseq(NTFS_INFO * ntfs,
                             TSK_FS_ATTR_RES)) == NULL) {
                     tsk_error_errstr2_concat(" - proc_attrseq: getnew");
                     // JRB: Coverity found leak.
-                    if (fs_attr_run)
+                    if (fs_attr_run) {
                         tsk_fs_attr_run_free(fs_attr_run);
-                    fs_attr_run = NULL;
+                        fs_attr_run = NULL;
+                    }
                     return TSK_ERR;
                 }
 
@@ -2132,10 +2150,15 @@ ntfs_proc_attrseq(NTFS_INFO * ntfs,
                     tsk_error_errstr2_concat("- proc_attrseq: set run");
                     
                     // If the run wasn't saved to the attribute, free it now
-                    if (fs_attr_run && (fs_attr->nrd.run == NULL))
+                    if (fs_attr_run && (fs_attr->nrd.run == NULL)) {
                         tsk_fs_attr_run_free(fs_attr_run);
+                        fs_attr_run = NULL;
+                    }
                     return TSK_COR;
                 }
+                // fs_file has taken over management of fs_attr_run
+                fs_attr_run = NULL;
+
                 // set the special functions
                 if (fs_file->meta->flags & TSK_FS_META_FLAG_COMP) {
                     fs_attr->w = ntfs_attr_walk_special;
@@ -2146,6 +2169,10 @@ ntfs_proc_attrseq(NTFS_INFO * ntfs,
             else {
                 if (tsk_fs_attr_add_run(fs, fs_attr, fs_attr_run)) {
                     tsk_error_errstr2_concat(" - proc_attrseq: put run");
+                    if (fs_attr_run) {
+                        tsk_fs_attr_run_free(fs_attr_run);
+                        fs_attr_run = NULL;
+                    }
                     return TSK_COR;
                 }
             }
@@ -2158,7 +2185,9 @@ ntfs_proc_attrseq(NTFS_INFO * ntfs,
 
         /* Standard Information (is always resident) */
         if (type == NTFS_ATYPE_SI) {
-            ntfs_attr_si *si;
+            uint32_t attr_len = tsk_getu32(fs->endian, attr->len);
+            uint16_t attr_off = tsk_getu16(fs->endian, attr->c.r.soff);
+
             if (attr->res != NTFS_MFT_RES) {
                 tsk_error_reset();
                 tsk_error_set_errno(TSK_ERR_FS_INODE_COR);
@@ -2166,8 +2195,23 @@ ntfs_proc_attrseq(NTFS_INFO * ntfs,
                     ("proc_attrseq: Standard Information Attribute is not resident!");
                 return TSK_COR;
             }
-            si = (ntfs_attr_si *) ((uintptr_t) attr +
-                tsk_getu16(fs->endian, attr->c.r.soff));
+            if ((attr_off < 16) || (attr_off >= attr_len)) {
+                tsk_error_reset();
+                tsk_error_set_errno(TSK_ERR_FS_INODE_COR);
+                tsk_error_set_errstr
+                    ("proc_attrseq: resident data offset of Standard Information Attribute is out of bounds!");
+                return TSK_COR;
+            }
+            // A Standard Information Attribute can be 48 or 72 bytes in size (ntfs_attr_si is 72)
+            if ((attr_len < 48) || (attr_off > attr_len - 48)) {
+                tsk_error_reset();
+                tsk_error_set_errno(TSK_ERR_FS_INODE_COR);
+                tsk_error_set_errstr
+                    ("proc_attrseq: resident data of Standard Information Attribute is too small!");
+                return TSK_COR;
+            }
+            ntfs_attr_si *si = (ntfs_attr_si *) ((uintptr_t) attr + attr_off);
+
             fs_file->meta->mtime =
                 nt2unixtime(tsk_getu64(fs->endian, si->mtime));
             fs_file->meta->mtime_nano =
@@ -2460,6 +2504,7 @@ ntfs_proc_attrlist(NTFS_INFO * ntfs,
             (void *) &load_file)) {
         tsk_error_errstr2_concat("- processing attrlist");
         free(mft);
+        free(buf);
         free(map);
         return TSK_ERR;
     }
@@ -2598,6 +2643,8 @@ ntfs_proc_attrlist(NTFS_INFO * ntfs,
             free(mft);
             free(map);
             free(buf);
+            if (mftSeenList != NULL)
+                tsk_stack_free(mftSeenList);
             tsk_error_errstr2_concat(" - proc_attrlist");
             return TSK_ERR;
         }
@@ -2627,6 +2674,8 @@ ntfs_proc_attrlist(NTFS_INFO * ntfs,
                 free(mft);
                 free(map);
                 free(buf);
+                if (mftSeenList != NULL)
+                    tsk_stack_free(mftSeenList);
                 return TSK_COR;
             }
         }
@@ -2654,6 +2703,8 @@ ntfs_proc_attrlist(NTFS_INFO * ntfs,
             free(mft);
             free(map);
             free(buf);
+            if (mftSeenList != NULL)
+                tsk_stack_free(mftSeenList);
             return TSK_COR;
         }
 
@@ -3132,6 +3183,10 @@ ntfs_load_bmap(NTFS_INFO * ntfs)
         tsk_error_set_errstr("Error Finding Bitmap Data Attribute");
         goto on_error;
     }
+    uint32_t attr_len = tsk_getu32(fs->endian, data_attr->len);
+    if (attr_len > ntfs->mft_rsize_b) {
+        goto on_error;
+    }
 
     /* convert to generic form */
     if ((ntfs_make_data_run(ntfs,
diff --git a/tsk/fs/tsk_hfs.h b/tsk/fs/tsk_hfs.h
index 7becb2ab3510f7d9c3861cdc311197648b1758d8..2530e0cfe324680852dd25d011cc5bbb49eb03c5 100644
--- a/tsk/fs/tsk_hfs.h
+++ b/tsk/fs/tsk_hfs.h
@@ -734,7 +734,7 @@ extern uint8_t hfs_UTF16toUTF8(TSK_FS_INFO *, uint8_t *, int, char *, int,
     uint32_t);
 
 extern int hfs_unicode_compare(HFS_INFO *, const hfs_uni_str *,
-    const hfs_uni_str *);
+    int, const hfs_uni_str *);
 extern uint16_t hfs_get_idxkeylen(HFS_INFO * hfs, uint16_t keylen,
     const hfs_btree_header_record * header);
 
@@ -765,7 +765,7 @@ extern char hfs_is_hard_link(TSK_FS_INFO * fs, TSK_INUM_T inum);
  * @param ptr Pointer to data that was passed into parent
  */
 typedef uint8_t(*TSK_HFS_BTREE_CB) (HFS_INFO *, int8_t level_type,
-    const hfs_btree_key_cat * cur_key,
+    const hfs_btree_key_cat * cur_key, int cur_keylen, size_t node_size,
     TSK_OFF_T key_off, void *ptr);
 // return values for callback
 #define HFS_BTREE_CB_IDX_LT     1       // current key is less than target (keeps looking in node)
diff --git a/tsk/fs/unix_misc.c b/tsk/fs/unix_misc.c
index 3f114c8a2c7b2f88b8fc585bb4f423c5cb10bae7..896bba66ee4ad497223023b5262a4be3376884c2 100644
--- a/tsk/fs/unix_misc.c
+++ b/tsk/fs/unix_misc.c
@@ -180,6 +180,7 @@ unix_make_data_run_indirect(TSK_FS_INFO * fs, TSK_FS_ATTR * fs_attr,
             }
             tsk_error_set_errstr2("unix_make_data_run_indir: Block %"
                 PRIuDADDR, addr);
+            free(data_run);
             return -1;
         }
     }
@@ -187,6 +188,8 @@ unix_make_data_run_indirect(TSK_FS_INFO * fs, TSK_FS_ATTR * fs_attr,
     // save the run
     tsk_fs_attr_append_run(fs, fs_attr_indir, data_run);
 
+    data_run = NULL;
+
     // convert the raw addresses to the correct endian ordering
     if ((fs->ftype == TSK_FS_TYPE_FFS1)
         || (fs->ftype == TSK_FS_TYPE_FFS1B)
@@ -232,10 +235,10 @@ unix_make_data_run_indirect(TSK_FS_INFO * fs, TSK_FS_ATTR * fs_attr,
         }
     }
 
-    if (retval == -1)
+    if (retval == -1) {
         return -1;
-    else
-        return length - length_remain;
+    }
+    return length - length_remain;
 }
 
 
diff --git a/tsk/img/Makefile.am b/tsk/img/Makefile.am
index cf08eca1977a053e2417d1ed051bfb2cb609bbcb..9997d28750ee89e8a3fed4edb332f49751ad1b92 100644
--- a/tsk/img/Makefile.am
+++ b/tsk/img/Makefile.am
@@ -4,7 +4,7 @@ EXTRA_DIST = .indent.pro
 noinst_LTLIBRARIES = libtskimg.la
 libtskimg_la_SOURCES = img_open.cpp img_types.c raw.c raw.h \
     aff.c aff.h ewf.cpp ewf.h tsk_img_i.h img_io.c mult_files.c \
-    vhd.c vhd.h vmdk.c vmdk.h img_writer.cpp img_writer.h
+    vhd.c vhd.h vmdk.c vmdk.h img_writer.cpp img_writer.h unsupported_types.c unsupported_types.h
 
 indent:
 	indent *.c *.h
diff --git a/tsk/img/raw.c b/tsk/img/raw.c
index e8e0f11136084b02e1d5069106ea695a0ee7b15f..2b92ab6173575eff511034902c3bc52da715e0fa 100755
--- a/tsk/img/raw.c
+++ b/tsk/img/raw.c
@@ -239,7 +239,7 @@ raw_read_segment(IMG_RAW_INFO * raw_info, int idx, char *buf,
         // the number of bytes read
         if (sector_aligned_buf != NULL) {
             memcpy(buf, sector_aligned_buf + rel_offset % raw_info->img_info.sector_size, len);
-            cnt = cnt - offset_to_read % raw_info->img_info.sector_size;
+            cnt = cnt - rel_offset % raw_info->img_info.sector_size;
             if (cnt < 0) {
                 cnt = -1;
             }
diff --git a/tsk/img/unsupported_types.c b/tsk/img/unsupported_types.c
new file mode 100644
index 0000000000000000000000000000000000000000..ed81242c5a3bf5210223c0f98850da13c4c0682b
--- /dev/null
+++ b/tsk/img/unsupported_types.c
@@ -0,0 +1,182 @@
+/*
+** The Sleuth Kit
+**
+** Copyright (c) 2021 Basis Technology Corp.  All rights reserved
+** Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+#include "unsupported_types.h"
+
+/**
+ * Compare the buffer at the given offset with the given signature.
+ *
+ * @return 1 if the signature is found, 0 otherwise
+ */
+int
+detectImageSignatureWithOffset(const char * signature, size_t signatureLen, size_t offset, const char * buf, size_t bufLen) {
+
+    if (signatureLen + offset > bufLen) {
+        return 0;
+    }
+
+    if (memcmp(signature, buf + offset, signatureLen) == 0) {
+        return 1;
+    }
+    return 0;
+}
+
+/**
+* Compare the beginning of the buffer with the given signature.
+*
+* @return 1 if the signature is found, 0 otherwise
+*/
+int
+detectImageSignature(const char * signature, size_t signatureLen, const char * buf, size_t bufLen) {
+    return detectImageSignatureWithOffset(signature, signatureLen, 0, buf, bufLen);
+}
+
+/**
+* Calculate the checksum on the first block to see if it matches the tar format.
+*
+* @return 1 if the checksum is valid, 0 otherwise
+*/
+int
+verifyTarChecksum(const char * buf, size_t bufLen) {
+    if (bufLen < 512) {
+        return 0;
+    }
+
+    // Calculate checksum of first 512 bytes.
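+    // Per the ustar format, the 8-byte checksum field is treated as if it were filled with spaces while summing the header bytes.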
+    unsigned int cksum = 0;
+    const int cksumOffset = 148;
+    const int cksumLength = 8;
+    for (int i = 0; i < 512; i++) {
+        // Add each byte. For the checksum bytes, add a space.
+        if ((i < cksumOffset) || (i >= cksumOffset + cksumLength)) {
+            cksum += (unsigned char)buf[i];
+        }
+        else {
+            cksum += ' ';
+        }
+    }
+
+    // Convert the checksum field (octal) to a number
+    unsigned int savedCksum = 0;
+
+    // Skip leading spaces
+    int startingOffset = cksumOffset;
+    for (int i = 0; i < cksumLength; i++) {
+        unsigned char b = buf[cksumOffset + i];
+        if (b == ' ') {
+            startingOffset++;
+        }
+        else {
+            // Hit a non-space character
+            break;
+        }
+    }
+
+    // If the checksum is all spaces, it is not valid
+    if (startingOffset == cksumOffset + cksumLength) {
+        return 0;
+    }
+
+    // Convert octal digits
+    for (int offset = startingOffset; offset < cksumOffset + cksumLength; offset++) {
+        unsigned char b = buf[offset];
+
+        if (b == 0 || b == ' ') {
+            // We're done reading the checksum
+            break;
+        }
+
+        if (b < '0' || b > '7') {
+            // Found an illegal character
+            return 0;
+        }
+
+        // Add the next digit
+        savedCksum = savedCksum << 3 | (b - '0');
+    }
+
+    if (savedCksum == cksum) {
+        return 1;
+    }
+
+    return 0;
+}
+
+/**
+ * Check if the given raw image is a known but unsupported type.
+ * The returned string should be freed by the caller.
+ *
+ * @return The name of the image type or NULL if it doesn't match a known type.
+ */
+char* detectUnsupportedImageType(TSK_IMG_INFO * img_info) {
+
+    // Read the beginning of the image. Try to read in enough bytes for all signatures.
+    // The tar checksum calculation requires 512 bytes.
+    size_t maxLen = 512; // Bytes to read
+    size_t len;          // The actual number of bytes read
+    char* buf = (char*)tsk_malloc(maxLen);
+    if (buf == NULL) {
+        return NULL;
+    }
+
+    len = tsk_img_read(img_info, 0, buf, maxLen);
+    if (len == 0) {
+        free(buf);
+        return NULL;
+    }
+
+    char * result = (char*) tsk_malloc(256);
+    if (result == NULL) {
+        free(buf);
+        return NULL;
+    }
+    result[0] = '\0';
+
+    if (detectImageSignature("ADSEGMENTEDFILE", 15, buf, len)) {
+        strcpy(result, "Custom Content Image (AD1)");
+    }
+    else if (detectImageSignature("EVF2\r\n\x81\x00", 8, buf, len)) {
+        strcpy(result, "EWF Version 2 (Ex01)");
+    }
+    else if (detectImageSignature("Rar!\x1a\x07", 6, buf, len)) {
+        strcpy(result, "RAR Archive");
+    }
+    else if (detectImageSignature("7z\xbc\xaf\x27\x1c", 6, buf, len)) {
+        strcpy(result, "7-Zip Archive");
+    }
+    else if (detectImageSignature("[Dumps]", 7, buf, len)) {
+        strcpy(result, "Cellebrite (UFD)");
+    }
+    else if (detectImageSignatureWithOffset("ustar", 5, 257, buf, len)) {
+        strcpy(result, "Tar Archive");
+    }
+    else if (detectImageSignature("PK\x03\x04", 4, buf, len) || detectImageSignature("PK\x05\x06", 4, buf, len)
+        || (detectImageSignature("PK\x07\x08", 4, buf, len))) {
+        strcpy(result, "Zip Archive");
+    }
+    else if (detectImageSignature("BZh", 3, buf, len)) {
+        strcpy(result, "Bzip Archive");
+    }
+    else if (detectImageSignature("\x1f\x8b", 2, buf, len)) {
+        strcpy(result, "Gzip Archive");
+    } 
+    else if (verifyTarChecksum(buf, len)) {
+        strcpy(result, "Tar Archive");
+    }
+
+    free(buf);
+    if (strlen(result) > 0) {
+        return result;
+    }
+
+    free(result);
+    result = NULL;
+    return NULL;
+}
\ No newline at end of file
diff --git a/tsk/img/unsupported_types.h b/tsk/img/unsupported_types.h
new file mode 100644
index 0000000000000000000000000000000000000000..2d4531fec4e5d9f5d9c61bb6690373c937826ac1
--- /dev/null
+++ b/tsk/img/unsupported_types.h
@@ -0,0 +1,26 @@
+/*
+** The Sleuth Kit
+**
+** Copyright (c) 2021 Basis Technology Corp.  All rights reserved
+** Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+#ifndef _UNSUPPORTED_TYPES_H_
+#define _UNSUPPORTED_TYPES_H_
+
+#include "tsk/base/tsk_base.h"
+#include "tsk/img/tsk_img.h"
+#include "tsk/base/tsk_base_i.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+extern char* detectUnsupportedImageType(TSK_IMG_INFO * img_info);
+#ifdef __cplusplus
+}
+#endif
+
+#endif
\ No newline at end of file
diff --git a/tsk/util/Makefile.am b/tsk/util/Makefile.am
index 547dd819cb698993d41a2f90b2f46dcb900c661a..b425e042f575d00a8c89e4c59dfa9e8e8bfc7adb 100644
--- a/tsk/util/Makefile.am
+++ b/tsk/util/Makefile.am
@@ -3,7 +3,7 @@ AM_CXXFLAGS = -Wall -Wextra -Werror
 EXTRA_DIST = .indent.pro 
 
 noinst_LTLIBRARIES = libtskutil.la
-libtskutil_la_SOURCES = crypto.cpp
+libtskutil_la_SOURCES = crypto.cpp detect_encryption.c
 
 indent:
 	indent *.c *.cpp *.h *.hpp
diff --git a/tsk/util/crypto.cpp b/tsk/util/crypto.cpp
index 263dc63708819db6eb65265c23a21b4015bcac01..870fe83e2132c36604b7f3400ce8beb59c7af19d 100644
--- a/tsk/util/crypto.cpp
+++ b/tsk/util/crypto.cpp
@@ -1,3 +1,16 @@
+/*
+ * The Sleuth Kit
+ *
+ * Brian Carrier [carrier <at> sleuthkit [dot] org]
+ * Copyright (c) 2018-2019 BlackBag Technologies.  All Rights reserved
+ *
+ * This software is distributed under the Common Public License 1.0
+ */
+
+/* This file contains routines used by APFS code.
+ * It could probably move into the 'fs' folder.
+ * It provides XTS wrappers around OpenSSL.
+ */
 #include "crypto.hpp"
 
 #ifdef HAVE_LIBOPENSSL
@@ -195,4 +208,4 @@ std::unique_ptr<uint8_t[]> hash_buffer_sha256(const void *input,
 
   return hash;
 }
-#endif
\ No newline at end of file
+#endif
diff --git a/tsk/util/crypto.hpp b/tsk/util/crypto.hpp
index 490a7924f77579700481da906f3cdcdcaaafa392..9ff99881fd67a173ae068677c7525e98a125c453 100644
--- a/tsk/util/crypto.hpp
+++ b/tsk/util/crypto.hpp
@@ -1,5 +1,18 @@
 #pragma once
 
+/*
+ * The Sleuth Kit
+ *
+ * Brian Carrier [carrier <at> sleuthkit [dot] org]
+ * Copyright (c) 2018-2019 BlackBag Technologies.  All Rights reserved
+ *
+ * This software is distributed under the Common Public License 1.0
+ */
+
+/**
+ * This is currently being used only by APFS
+ */
+
 #include "../base/tsk_base.h"
 
 #ifdef HAVE_LIBOPENSSL
@@ -48,4 +61,4 @@ std::unique_ptr<uint8_t[]> hash_buffer_md5(const void *input,
 std::unique_ptr<uint8_t[]> hash_buffer_sha256(const void *input,
                                               size_t len) noexcept;
 
-#endif
\ No newline at end of file
+#endif
diff --git a/tsk/util/detect_encryption.c b/tsk/util/detect_encryption.c
new file mode 100644
index 0000000000000000000000000000000000000000..9e64cbcaac5996059ec05c7eb0a4dc18261d3e62
--- /dev/null
+++ b/tsk/util/detect_encryption.c
@@ -0,0 +1,309 @@
+/*
+** The Sleuth Kit
+**
+** Copyright (c) 2021 Basis Technology Corp.  All rights reserved
+** Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+#include "detect_encryption.h"
+
+// Scans the buffer and returns 1 if the given signature is found, 0 otherwise.
+// Looks for the signature starting at each byte from startingOffset to endingOffset.
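+// Both startingOffset and endingOffset are inclusive.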
+int
+detectSignature(const char * signature, size_t signatureLen, size_t startingOffset, size_t endingOffset, const char * buf, size_t bufLen) {
+
+    for (size_t offset = startingOffset; offset <= endingOffset; offset++) {
+        if (offset + signatureLen >= bufLen) {
+            return 0;
+        }
+
+        if (memcmp(signature, buf + offset, signatureLen) == 0) {
+            return 1;
+        }
+    }
+    return 0;
+}
+
+// Returns 1 if LUKS signature is found, 0 otherwise
+int
+detectLUKS(const char * buf, size_t len) {
+    const char * signature = "LUKS\xba\xbe";
+    return detectSignature(signature, strlen(signature), 0, 0, buf, len);
+}
+
+// Returns 1 if BitLocker signature is found, 0 otherwise
+int
+detectBitLocker(const char * buf, size_t len) {
+
+    // Look for the signature near the beginning of the buffer
+    const char * signature = "-FVE-FS-";
+    return detectSignature(signature, strlen(signature), 0, 16, buf, len);
+}
+
+// Returns 1 if FileVault signature is found, 0 otherwise
+int
+detectFileVault(const char * buf, size_t len) {
+    const char * signature = "encrdsa";
+    return detectSignature(signature, strlen(signature), 0, 0, buf, len);
+}
+
+// Returns 1 if Check Point signature is found, 0 otherwise
+int
+detectCheckPoint(const char * buf, size_t len) {
+    // Look for the signature near the beginning of the buffer
+    const char * signature = "Protect";
+    return detectSignature(signature, strlen(signature), 80, 100, buf, len);
+}
+
+// Returns 1 if McAfee Safeboot signature is found, 0 otherwise
+int
+detectMcAfee(const char * buf, size_t len) {
+    // Look for the signature near the beginning of the buffer. Check two capitalizations.
+    const char * signature = "Safeboot";
+    const char * altSignature = "SafeBoot";
+    return (detectSignature(signature, strlen(signature), 0, 32, buf, len)
+        | detectSignature(altSignature, strlen(altSignature), 0, 32, buf, len));
+}
+
+// Returns 1 if Guardian Edge signature is found, 0 otherwise
+int
+detectGuardianEdge(const char * buf, size_t len) {
+    // Look for the signature near the beginning of the buffer
+    const char * signature = "PCGM";
+    return detectSignature(signature, strlen(signature), 0, 32, buf, len);
+}
+
+// Returns 1 if Sophos Safeguard signature is found, 0 otherwise
+int
+detectSophos(const char * buf, size_t len) {
+    // Look for the signature near the beginning of the buffer
+    const char * signature = "SGM400";
+    const char * altSignature = "SGE400";
+    return (detectSignature(signature, strlen(signature), 110, 150, buf, len)
+        | detectSignature(altSignature, strlen(altSignature), 110, 150, buf, len));
+}
+
+// Returns 1 if WinMagic SecureDoc signature is found, 0 otherwise
+int
+detectWinMagic(const char * buf, size_t len) {
+    // Look for the signature near the beginning of the buffer
+    const char * signature = "WMSD";
+    return detectSignature(signature, strlen(signature), 236, 256, buf, len);
+}
+
+// Returns 1 if Symantec PGP signature is found, 0 otherwise
+int
+detectSymantecPGP(const char * buf, size_t len) {
+    // Look for the signature near the beginning of the buffer
+    const char * signature = "\xeb\x48\x90PGPGUARD";
+    return detectSignature(signature, strlen(signature), 0, 32, buf, len);
+}
+
+// Returns the entropy of the beginning of the image.
+double
+calculateEntropy(TSK_IMG_INFO * img_info, TSK_DADDR_T offset) {
+
+    // Initialize frequency counts
+    int byteCounts[256];
+    for (int i = 0; i < 256; i++) {
+        byteCounts[i] = 0;
+    }
+
+    // Read in blocks of 65536 bytes, skipping the first block since it is more likely to contain header data.
+    size_t bufLen = 65536;
+    char buf[65536];
+    size_t bytesRead = 0;
+    for (uint64_t i = 1; i < 100; i++) {
+        if ((i + 1) * bufLen > (uint64_t)img_info->size - offset) {
+            break;
+        }
+
+        if (tsk_img_read(img_info, offset + i * bufLen, buf, bufLen) != bufLen) {
+            break;
+        }
+
+        for (int j = 0; j < bufLen; j++) {
+            unsigned char b = buf[j] & 0xff;
+            byteCounts[b]++;
+        }
+        bytesRead += bufLen;
+    }
+
+    // Calculate entropy
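+    // Shannon entropy: H = -sum(p_i * log2(p_i)) over all byte values, in bits per byte (maximum 8.0).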
+    double entropy = 0.0;
+    double log2 = log(2);
+    for (int i = 0; i < 256; i++) {
+        if (byteCounts[i] > 0) {
+            double p = (double)(byteCounts[i]) / bytesRead;
+            entropy -= p * log(p) / log2;
+        }
+    }
+    return entropy;
+}
+
+/**
+ * Detect volume-type encryption in the image starting at the given offset.
+ * May return NULL on error. Note that the caller is responsible for freeing the result.
+ * 
+ * @param img_info The open image
+ * @param offset   The offset for the beginning of the volume
+ *
+ * @return encryption_detected_result containing the result of the check, or NULL for certain types of errors.
+*/
+encryption_detected_result*
+detectVolumeEncryption(TSK_IMG_INFO * img_info, TSK_DADDR_T offset) {
+
+    encryption_detected_result* result = (encryption_detected_result*)tsk_malloc(sizeof(encryption_detected_result));
+    if (result == NULL) {
+        return result;
+    }
+    result->encryptionType = ENCRYPTION_DETECTED_NONE;
+    result->desc[0] = '\0';
+
+    if (img_info == NULL) {
+        return result;
+    }
+    if (offset > (uint64_t)img_info->size) {
+        return result;
+    }
+
+    // Read the beginning of the image. There should be room for all the signature searches.
+    size_t len = 1024;
+    char* buf = (char*)tsk_malloc(len);
+    if (buf == NULL) {
+        return result;
+    }
+    if (tsk_img_read(img_info, offset, buf, len) != len) {
+        free(buf);
+        return result;
+    }
+
+    // Look for BitLocker signature
+    if (detectBitLocker(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "BitLocker");
+        free(buf);
+        return result;
+    }
+
+    // Look for Linux Unified Key Setup (LUKS) signature
+    if (detectLUKS(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "LUKS");
+        free(buf);
+        return result;
+    }
+
+    // Look for FileVault
+    if (detectFileVault(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "FileVault");
+        free(buf);
+        return result;
+    }
+
+    free(buf);
+
+    // Final test - check entropy
+    double entropy = calculateEntropy(img_info, offset);
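+    // Encrypted (or compressed) data is close to random, so its entropy approaches 8 bits per byte; 7.5 is used as a heuristic threshold.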
+    if (entropy > 7.5) {
+        result->encryptionType = ENCRYPTION_DETECTED_ENTROPY;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "High entropy (%1.2lf)", entropy);
+        return result;
+    }
+
+    return result;
+}
+
+/**
+* Detect full disk encryption in the image starting at the given offset.
+* May return NULL on error. Note that the caller is responsible for freeing the result.
+*
+* @param img_info The open image
+* @param offset   The offset for the beginning of the image (TODO: determine whether this parameter is needed)
+*
+* @return encryption_detected_result containing the result of the check, or NULL for certain types of errors.
+*/
+encryption_detected_result*
+detectDiskEncryption(TSK_IMG_INFO * img_info, TSK_DADDR_T offset) {
+
+    encryption_detected_result* result = (encryption_detected_result*)tsk_malloc(sizeof(encryption_detected_result));
+    if (result == NULL) {
+        return result;
+    }
+    result->encryptionType = ENCRYPTION_DETECTED_NONE;
+    result->desc[0] = '\0';
+
+    if (img_info == NULL) {
+        return result;
+    }
+    if (offset > (uint64_t)img_info->size) {
+        return result;
+    }
+
+    // Read the beginning of the image. There should be room for all the signature searches.
+    size_t len = 1024;
+    char* buf = (char*)tsk_malloc(len);
+    if (buf == NULL) {
+        return result;
+    }
+    if (tsk_img_read(img_info, offset, buf, len) != len) {
+        free(buf);
+        return result;
+    }
+
+    // Look for Symantec PGP signature
+    if (detectSymantecPGP(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "Symantec PGP");
+        free(buf);
+        return result;
+    }
+
+    // Look for McAfee Safeboot signature
+    if (detectMcAfee(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "McAfee Safeboot");
+        free(buf);
+        return result;
+    }
+
+    // Look for Sophos Safeguard
+    if (detectSophos(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "Sophos Safeguard");
+        free(buf);
+        return result;
+    }
+
+    // Look for Guardian Edge signature
+    if (detectGuardianEdge(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "Guardian Edge");
+        free(buf);
+        return result;
+    }
+
+    // Look for Check Point signature
+    if (detectCheckPoint(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "Check Point");
+        free(buf);
+        return result;
+    }
+
+    // Look for WinMagic SecureDoc signature
+    if (detectWinMagic(buf, len)) {
+        result->encryptionType = ENCRYPTION_DETECTED_SIGNATURE;
+        snprintf(result->desc, TSK_ERROR_STRING_MAX_LENGTH, "WinMagic SecureDoc");
+        free(buf);
+        return result;
+    }
+    free(buf);
+    return result;
+}
+
+
diff --git a/tsk/util/detect_encryption.h b/tsk/util/detect_encryption.h
new file mode 100644
index 0000000000000000000000000000000000000000..1c60137fd055929341479e4c4e7a2a1f653c15ee
--- /dev/null
+++ b/tsk/util/detect_encryption.h
@@ -0,0 +1,36 @@
+/*
+** The Sleuth Kit
+**
+** Copyright (c) 2021 Basis Technology Corp.  All rights reserved
+** Contact: Brian Carrier [carrier <at> sleuthkit [dot] org]
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+#ifndef _DETECT_ENCRYPTION_H_
+#define _DETECT_ENCRYPTION_H_
+
+#include "tsk/base/tsk_base.h"
+#include "tsk/img/tsk_img.h"
+#include "tsk/base/tsk_base_i.h"
+#include <math.h>
+
+// Enum for the type of encryption detected
+typedef enum {
+    ENCRYPTION_DETECTED_NONE = 0,
+    ENCRYPTION_DETECTED_SIGNATURE = 1,
+    ENCRYPTION_DETECTED_ENTROPY = 2
+} encryption_detected_type;
+
+// Struct for storing the result of the encryption detection
+typedef struct encryption_detected_result {
+    encryption_detected_type encryptionType;
+    char desc[TSK_ERROR_STRING_MAX_LENGTH];
+} encryption_detected_result;
+
+encryption_detected_result* detectVolumeEncryption(TSK_IMG_INFO * img_info, TSK_DADDR_T offset);
+
+encryption_detected_result* detectDiskEncryption(TSK_IMG_INFO * img_info, TSK_DADDR_T offset);
+
+#endif
\ No newline at end of file
diff --git a/tsk/vs/dos.c b/tsk/vs/dos.c
index 3bd128e7b693f7bbea283c8c52392b468940127b..6385f4af4cef5a76184cb8b7e70ca05d0796e863 100644
--- a/tsk/vs/dos.c
+++ b/tsk/vs/dos.c
@@ -769,6 +769,7 @@ dos_load_ext_table(TSK_VS_INFO * vs, TSK_DADDR_T sect_cur,
                     tsk_error_set_errno(TSK_ERR_VS_BLK_NUM);
                     tsk_error_set_errstr
                         ("dos_load_ext_table: Loop in partition table detected");
+                    free(sect_buf);
                     return 1;
                 }
                 part_info = part_info->next;
diff --git a/tsk/vs/mac.c b/tsk/vs/mac.c
index 8853165c2a62f486cf06ee106af4c40771300054..07ac1ae3c2c5eb0dec6f75e2e4deba7462d60ccb 100644
--- a/tsk/vs/mac.c
+++ b/tsk/vs/mac.c
@@ -144,6 +144,7 @@ mac_load_table(TSK_VS_INFO * vs)
         }
 
         strncpy(str, (char *) part->type, sizeof(part->name));
+        str[sizeof(part->name) - 1] = 0;
 
         if (NULL == tsk_vs_part_add(vs, (TSK_DADDR_T) part_start,
                 (TSK_DADDR_T) part_size, (TSK_VS_PART_FLAG_ENUM)flag, str, -1,
diff --git a/tsk/vs/mm_open.c b/tsk/vs/mm_open.c
index f8bdce2e9759ad7c7a9b174003436608c6b46029..6ac5a6ce8daf774fe24128ea6e9173ab9916fb83 100644
--- a/tsk/vs/mm_open.c
+++ b/tsk/vs/mm_open.c
@@ -15,6 +15,7 @@
  */
 
 #include "tsk_vs_i.h"
+#include "tsk/util/detect_encryption.h"
 
 
 /**
@@ -175,7 +176,22 @@ tsk_vs_open(TSK_IMG_INFO * img_info, TSK_DADDR_T offset,
 
         if (vs_set == NULL) {
             tsk_error_reset();
-            tsk_error_set_errno(TSK_ERR_VS_UNKTYPE);
+
+            // Check whether the volume system appears to be encrypted.
+            // Note that detectDiskEncryption does not do an entropy calculation - high entropy 
+            // files will be reported by tsk_fs_open_img().
+            encryption_detected_result* result = detectDiskEncryption(img_info, offset);
+            if (result != NULL) {
+                if (result->encryptionType == ENCRYPTION_DETECTED_SIGNATURE) {
+                    tsk_error_set_errno(TSK_ERR_VS_ENCRYPTED);
+                    tsk_error_set_errstr(result->desc);
+                }
+                free(result);
+                result = NULL;
+            }
+            else {
+                tsk_error_set_errno(TSK_ERR_VS_UNKTYPE);
+            }
             return NULL;
         }
 
diff --git a/win32/libtsk/libtsk.vcxproj b/win32/libtsk/libtsk.vcxproj
index 72851289aed60823ba0e0e46158e5e17f0e50307..55d2da31ea84d05f8d390a971d181887900de517 100755
--- a/win32/libtsk/libtsk.vcxproj
+++ b/win32/libtsk/libtsk.vcxproj
@@ -300,6 +300,7 @@ xcopy /E /Y "$(VCInstallDir)\redist\$(PlatformTarget)\Microsoft.VC140.CRT" "$(Ou
     <ClCompile Include="..\..\tsk\hashdb\hdb_base.c" />
     <ClCompile Include="..\..\tsk\hashdb\binsrch_index.cpp" />
     <ClCompile Include="..\..\tsk\img\img_writer.cpp" />
+    <ClCompile Include="..\..\tsk\img\unsupported_types.c" />
     <ClCompile Include="..\..\tsk\img\vhd.c" />
     <ClCompile Include="..\..\tsk\img\vmdk.c" />
     <ClCompile Include="..\..\tsk\pool\apfs_pool_compat.cpp" />
@@ -308,6 +309,7 @@ xcopy /E /Y "$(VCInstallDir)\redist\$(PlatformTarget)\Microsoft.VC140.CRT" "$(Ou
     <ClCompile Include="..\..\tsk\pool\pool_read.cpp" />
     <ClCompile Include="..\..\tsk\pool\pool_types.cpp" />
     <ClCompile Include="..\..\tsk\util\crypto.cpp" />
+    <ClCompile Include="..\..\tsk\util\detect_encryption.c" />
     <ClCompile Include="..\..\tsk\vs\bsd.c" />
     <ClCompile Include="..\..\tsk\vs\dos.c" />
     <ClCompile Include="..\..\tsk\vs\gpt.c" />
@@ -410,6 +412,7 @@ xcopy /E /Y "$(VCInstallDir)\redist\$(PlatformTarget)\Microsoft.VC140.CRT" "$(Ou
     <ClInclude Include="..\..\tsk\hashdb\tsk_hash_info.h" />
     <ClInclude Include="..\..\tsk\img\img_writer.h" />
     <ClInclude Include="..\..\tsk\img\pool.hpp" />
+    <ClInclude Include="..\..\tsk\img\unsupported_types.h" />
     <ClInclude Include="..\..\tsk\img\vhd.h" />
     <ClInclude Include="..\..\tsk\img\vmdk.h" />
     <ClInclude Include="..\..\tsk\libtsk.h" />
@@ -420,6 +423,7 @@ xcopy /E /Y "$(VCInstallDir)\redist\$(PlatformTarget)\Microsoft.VC140.CRT" "$(Ou
     <ClInclude Include="..\..\tsk\pool\tsk_pool.h" />
     <ClInclude Include="..\..\tsk\pool\tsk_pool.hpp" />
     <ClInclude Include="..\..\tsk\util\crypto.hpp" />
+    <ClInclude Include="..\..\tsk\util\detect_encryption.h" />
     <ClInclude Include="..\..\tsk\util\lw_shared_ptr.hpp" />
     <ClInclude Include="..\..\tsk\util\span.hpp" />
     <ClInclude Include="..\..\tsk\vs\tsk_bsd.h" />
@@ -463,7 +467,7 @@ xcopy /E /Y "$(VCInstallDir)\redist\$(PlatformTarget)\Microsoft.VC140.CRT" "$(Ou
     <Import Project="..\packages\zlib_native.redist.1.2.11\build\native\zlib_native.redist.targets" Condition="Exists('..\packages\zlib_native.redist.1.2.11\build\native\zlib_native.redist.targets')" />
     <Import Project="..\packages\zlib_native.1.2.11\build\native\zlib_native.targets" Condition="Exists('..\packages\zlib_native.1.2.11\build\native\zlib_native.targets')" />
   </ImportGroup>
-  <Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild">
+  <Target Name="EnsureNuGetPackageBuildImports" BeforeTargets="PrepareForBuild" Condition="!$(Configuration.EndsWith('_NoLibs'))">
     <PropertyGroup>
       <ErrorText>This project references NuGet package(s) that are missing on this computer. Use NuGet Package Restore to download them.  For more information, see http://go.microsoft.com/fwlink/?LinkID=322105. The missing file is {0}.</ErrorText>
     </PropertyGroup>
diff --git a/win32/libtsk/libtsk.vcxproj.filters b/win32/libtsk/libtsk.vcxproj.filters
index b98509b3d0a57120ce52a2205858c1922af4cd14..1d6e4b3677bfbae933a78575cbfea5ef1a5991db 100755
--- a/win32/libtsk/libtsk.vcxproj.filters
+++ b/win32/libtsk/libtsk.vcxproj.filters
@@ -363,6 +363,12 @@
     <ClCompile Include="..\..\tsk\img\img_open.cpp">
       <Filter>img</Filter>
     </ClCompile>
+    <ClCompile Include="..\..\tsk\util\detect_encryption.c">
+      <Filter>util</Filter>
+    </ClCompile>
+    <ClCompile Include="..\..\tsk\img\unsupported_types.c">
+      <Filter>img</Filter>
+    </ClCompile>
   </ItemGroup>
   <ItemGroup>
     <ClInclude Include="..\..\tsk\vs\tsk_bsd.h">
@@ -534,6 +540,12 @@
     <ClInclude Include="..\..\tsk\img\pool.hpp">
       <Filter>img</Filter>
     </ClInclude>
+    <ClInclude Include="..\..\tsk\util\detect_encryption.h">
+      <Filter>util</Filter>
+    </ClInclude>
+    <ClInclude Include="..\..\tsk\img\unsupported_types.h">
+      <Filter>img</Filter>
+    </ClInclude>
   </ItemGroup>
   <ItemGroup>
     <None Include="packages.config" />
diff --git a/win32/tsk-win.sln b/win32/tsk-win.sln
index 7aa2bca8cc9f1672907d050c358d28e5ba5adcd8..58c4a67e4a37e816e88010adda9f14c97aea42ac 100644
--- a/win32/tsk-win.sln
+++ b/win32/tsk-win.sln
@@ -159,6 +159,8 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "Rejistry++", "rejistry++\Re
 EndProject
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "pstat", "pstat\pstat.vcxproj", "{5D75FBFB-539A-4014-ACEB-520BB1451F00}"
 EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "tsk_imageinfo", "tsk_imageinfo\tsk_imageinfo.vcxproj", "{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}"
+EndProject
 Global
 	GlobalSection(SolutionConfigurationPlatforms) = preSolution
 		Debug_NoLibs|Win32 = Debug_NoLibs|Win32
@@ -697,6 +699,22 @@ Global
 		{5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release|Win32.Build.0 = Release|Win32
 		{5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release|x64.ActiveCfg = Release|x64
 		{5D75FBFB-539A-4014-ACEB-520BB1451F00}.Release|x64.Build.0 = Release|x64
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug_NoLibs|Win32.ActiveCfg = Debug_NoLibs|Win32
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug_NoLibs|Win32.Build.0 = Debug_NoLibs|Win32
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug_NoLibs|x64.ActiveCfg = Debug_NoLibs|x64
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug_NoLibs|x64.Build.0 = Debug_NoLibs|x64
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug|Win32.ActiveCfg = Debug|Win32
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug|Win32.Build.0 = Debug|Win32
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug|x64.ActiveCfg = Debug|x64
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Debug|x64.Build.0 = Debug|x64
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release_NoLibs|Win32.ActiveCfg = Release_NoLibs|Win32
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release_NoLibs|Win32.Build.0 = Release_NoLibs|Win32
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release_NoLibs|x64.ActiveCfg = Release_NoLibs|x64
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release_NoLibs|x64.Build.0 = Release_NoLibs|x64
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release|Win32.ActiveCfg = Release|Win32
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release|Win32.Build.0 = Release|Win32
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release|x64.ActiveCfg = Release|x64
+		{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}.Release|x64.Build.0 = Release|x64
 	EndGlobalSection
 	GlobalSection(SolutionProperties) = preSolution
 		HideSolutionNode = FALSE
diff --git a/win32/tsk_imageinfo/tsk_imageinfo.vcxproj b/win32/tsk_imageinfo/tsk_imageinfo.vcxproj
new file mode 100644
index 0000000000000000000000000000000000000000..046e460f9f886bb6a8bae54bf63368d3464e2702
--- /dev/null
+++ b/win32/tsk_imageinfo/tsk_imageinfo.vcxproj
@@ -0,0 +1,334 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup Label="ProjectConfigurations">
+    <ProjectConfiguration Include="Debug_NoLibs|Win32">
+      <Configuration>Debug_NoLibs</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Debug_NoLibs|x64">
+      <Configuration>Debug_NoLibs</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Debug|Win32">
+      <Configuration>Debug</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Debug|x64">
+      <Configuration>Debug</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release_NoLibs|Win32">
+      <Configuration>Release_NoLibs</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release_NoLibs|x64">
+      <Configuration>Release_NoLibs</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|Win32">
+      <Configuration>Release</Configuration>
+      <Platform>Win32</Platform>
+    </ProjectConfiguration>
+    <ProjectConfiguration Include="Release|x64">
+      <Configuration>Release</Configuration>
+      <Platform>x64</Platform>
+    </ProjectConfiguration>
+  </ItemGroup>
+  <PropertyGroup Label="Globals">
+    <ProjectGuid>{09E82DEB-4BC3-44B5-A9F6-4D65F885F88C}</ProjectGuid>
+    <RootNamespace>tsk_imageinfo</RootNamespace>
+    <WindowsTargetPlatformVersion>8.1</WindowsTargetPlatformVersion>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|Win32'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <CharacterSet>Unicode</CharacterSet>
+    <CLRSupport>true</CLRSupport>
+    <PlatformToolset>v140_xp</PlatformToolset>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|x64'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <CharacterSet>Unicode</CharacterSet>
+    <CLRSupport>false</CLRSupport>
+    <PlatformToolset>v140_xp</PlatformToolset>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <CharacterSet>Unicode</CharacterSet>
+    <CLRSupport>true</CLRSupport>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <PlatformToolset>v140_xp</PlatformToolset>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|Win32'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <CharacterSet>Unicode</CharacterSet>
+    <CLRSupport>false</CLRSupport>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <PlatformToolset>v140_xp</PlatformToolset>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <CharacterSet>Unicode</CharacterSet>
+    <CLRSupport>false</CLRSupport>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <PlatformToolset>v140_xp</PlatformToolset>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|x64'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <CharacterSet>Unicode</CharacterSet>
+    <CLRSupport>false</CLRSupport>
+    <WholeProgramOptimization>true</WholeProgramOptimization>
+    <PlatformToolset>v140_xp</PlatformToolset>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <CharacterSet>Unicode</CharacterSet>
+    <CLRSupport>true</CLRSupport>
+    <PlatformToolset>v140_xp</PlatformToolset>
+  </PropertyGroup>
+  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
+    <ConfigurationType>Application</ConfigurationType>
+    <CharacterSet>Unicode</CharacterSet>
+    <CLRSupport>false</CLRSupport>
+    <PlatformToolset>v140_xp</PlatformToolset>
+  </PropertyGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+  <ImportGroup Label="ExtensionSettings">
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|Win32'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|x64'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|Win32'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|x64'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="PropertySheets">
+    <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+  </ImportGroup>
+  <PropertyGroup Label="UserMacros" />
+  <PropertyGroup>
+    <_ProjectFileVersion>10.0.30319.1</_ProjectFileVersion>
+    <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
+    <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(OutDir)</OutDir>
+    <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">$(IntDir)</IntDir>
+    <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">$(IntDir)</IntDir>
+    <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</LinkIncremental>
+    <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">true</LinkIncremental>
+    <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
+    <OutDir Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
+    <OutDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(OutDir)</OutDir>
+    <OutDir Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|x64'">$(OutDir)</OutDir>
+    <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">$(IntDir)</IntDir>
+    <IntDir Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|Win32'">$(IntDir)</IntDir>
+    <IntDir Condition="'$(Configuration)|$(Platform)'=='Release|x64'">$(IntDir)</IntDir>
+    <IntDir Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|x64'">$(IntDir)</IntDir>
+    <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</LinkIncremental>
+    <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|Win32'">false</LinkIncremental>
+    <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</LinkIncremental>
+    <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|x64'">false</LinkIncremental>
+    <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|Win32'">$(SolutionDir)$(Configuration)\</OutDir>
+    <OutDir Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|x64'">$(OutDir)</OutDir>
+    <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|Win32'">$(IntDir)</IntDir>
+    <IntDir Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|x64'">$(IntDir)</IntDir>
+    <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|Win32'">true</LinkIncremental>
+    <LinkIncremental Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|x64'">true</LinkIncremental>
+    <IgnoreImportLibrary Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</IgnoreImportLibrary>
+    <IgnoreImportLibrary Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|x64'">false</IgnoreImportLibrary>
+  </PropertyGroup>
+  <Import Project="$(SolutionDir)\NugetPackages.props" />
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
+    <ClCompile>
+      <Optimization>Disabled</Optimization>
+      <AdditionalIncludeDirectories>$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;_ITERATOR_DEBUG_LEVEL=2;WINVER=0x0601;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <WarningLevel>Level3</WarningLevel>
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+    </ClCompile>
+    <Link>
+      <AdditionalDependencies>libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalLibraryDirectories>$(TskNugetLibs);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <AssemblyDebug>true</AssemblyDebug>
+      <TargetMachine>MachineX86</TargetMachine>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
+    <ClCompile>
+      <Optimization>Disabled</Optimization>
+      <AdditionalIncludeDirectories>$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <WarningLevel>Level3</WarningLevel>
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+    </ClCompile>
+    <Link>
+      <AdditionalDependencies>libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalLibraryDirectories>$(TskNugetLibs);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <AssemblyDebug>true</AssemblyDebug>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
+    <ClCompile>
+      <AdditionalIncludeDirectories>$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <WarningLevel>Level3</WarningLevel>
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+    </ClCompile>
+    <Link>
+      <AdditionalDependencies>libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalLibraryDirectories>$(TskNugetLibs);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <TargetMachine>MachineX86</TargetMachine>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|Win32'">
+    <ClCompile>
+      <AdditionalIncludeDirectories>$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
+      <WarningLevel>Level3</WarningLevel>
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+      <CompileAsManaged>false</CompileAsManaged>
+      <ExceptionHandling>Sync</ExceptionHandling>
+      <MinimalRebuild>false</MinimalRebuild>
+      <FunctionLevelLinking>true</FunctionLevelLinking>
+    </ClCompile>
+    <Link>
+      <AdditionalDependencies>%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <TargetMachine>MachineX86</TargetMachine>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+    <ClCompile>
+      <AdditionalIncludeDirectories>$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <WarningLevel>Level3</WarningLevel>
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+    </ClCompile>
+    <Link>
+      <AdditionalDependencies>libvhdi.lib;libvmdk.lib;libewf.lib;zlib.lib;libcrypto.lib;%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalLibraryDirectories>$(TskNugetLibs);%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <OptimizeReferences>true</OptimizeReferences>
+      <EnableCOMDATFolding>true</EnableCOMDATFolding>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release_NoLibs|x64'">
+    <ClCompile>
+      <AdditionalIncludeDirectories>$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_WARNINGS;NDEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreaded</RuntimeLibrary>
+      <PrecompiledHeader>NotUsing</PrecompiledHeader>
+      <WarningLevel>Level3</WarningLevel>
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+      <CompileAsManaged>false</CompileAsManaged>
+      <MinimalRebuild>false</MinimalRebuild>
+      <FunctionLevelLinking>true</FunctionLevelLinking>
+      <ExceptionHandling>Async</ExceptionHandling>
+    </ClCompile>
+    <Link>
+      <AdditionalDependencies>%(AdditionalDependencies)</AdditionalDependencies>
+      <AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <OptimizeReferences>true</OptimizeReferences>
+      <EnableCOMDATFolding>true</EnableCOMDATFolding>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|Win32'">
+    <ClCompile>
+      <Optimization>Disabled</Optimization>
+      <AdditionalIncludeDirectories>$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreadedDebugDLL</RuntimeLibrary>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <WarningLevel>Level3</WarningLevel>
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+    </ClCompile>
+    <Link>
+      <AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <AssemblyDebug>true</AssemblyDebug>
+      <TargetMachine>MachineX86</TargetMachine>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug_NoLibs|x64'">
+    <ClCompile>
+      <Optimization>Disabled</Optimization>
+      <AdditionalIncludeDirectories>$(ProjectDir)\..\..\;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+      <PreprocessorDefinitions>WIN32;_CRT_SECURE_NO_WARNINGS;_DEBUG;_CRT_SECURE_NO_DEPRECATE;WINVER=0x0601;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+      <RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
+      <PrecompiledHeader>
+      </PrecompiledHeader>
+      <WarningLevel>Level3</WarningLevel>
+      <DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
+    </ClCompile>
+    <Link>
+      <AdditionalLibraryDirectories>%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+      <GenerateDebugInformation>true</GenerateDebugInformation>
+      <AssemblyDebug>true</AssemblyDebug>
+      <SubSystem>Console</SubSystem>
+    </Link>
+  </ItemDefinitionGroup>
+  <ItemGroup>
+    <Reference Include="System">
+      <CopyLocalSatelliteAssemblies>true</CopyLocalSatelliteAssemblies>
+      <ReferenceOutputAssembly>true</ReferenceOutputAssembly>
+    </Reference>
+    <Reference Include="System.Data">
+      <CopyLocalSatelliteAssemblies>true</CopyLocalSatelliteAssemblies>
+      <ReferenceOutputAssembly>true</ReferenceOutputAssembly>
+    </Reference>
+    <Reference Include="System.Xml">
+      <CopyLocalSatelliteAssemblies>true</CopyLocalSatelliteAssemblies>
+      <ReferenceOutputAssembly>true</ReferenceOutputAssembly>
+    </Reference>
+  </ItemGroup>
+  <ItemGroup>
+    <ClCompile Include="..\..\tools\autotools\tsk_imageinfo.cpp" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\libtsk\libtsk.vcxproj">
+      <Project>{76efc06c-1f64-4478-abe8-79832716b393}</Project>
+      <ReferenceOutputAssembly>false</ReferenceOutputAssembly>
+    </ProjectReference>
+  </ItemGroup>
+  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+  <ImportGroup Label="ExtensionTargets">
+  </ImportGroup>
+</Project>
\ No newline at end of file
diff --git a/win32/tsk_imageinfo/tsk_imageinfo.vcxproj.filters b/win32/tsk_imageinfo/tsk_imageinfo.vcxproj.filters
new file mode 100644
index 0000000000000000000000000000000000000000..8e8d4e7431d53e1e092d83f756c6cf1e522eb74b
--- /dev/null
+++ b/win32/tsk_imageinfo/tsk_imageinfo.vcxproj.filters
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="utf-8"?>
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+  <ItemGroup>
+    <Filter Include="Source Files">
+      <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
+      <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
+    </Filter>
+    <Filter Include="Header Files">
+      <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
+      <Extensions>h;hh;hpp;hxx;hm;inl;inc;xsd</Extensions>
+    </Filter>
+    <Filter Include="Resource Files">
+      <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
+      <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
+    </Filter>
+  </ItemGroup>
+  <ItemGroup>
+    <ClCompile Include="..\..\tools\autotools\tsk_imageinfo.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+  </ItemGroup>
+</Project>
\ No newline at end of file