diff --git a/.travis.yml b/.travis.yml
index cbbb15058488d870bfc25d29edd6c69c8d24fd05..ffb01029e81114d22987d409b782affa0d33d077 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,14 +1,15 @@
 language: cpp
+
 matrix:
   include:
   - compiler: clang
     os: linux
-    dist: xenial
+    dist: bionic
     sudo: required
     group: edge
   - compiler: gcc
     os: linux
-    dist: xenial
+    dist: bionic
     sudo: required
     group: edge
   - compiler: clang
@@ -16,11 +17,52 @@ matrix:
   - compiler: gcc
     os: osx
 
+addons:
+  apt:
+    update: true
+    packages:
+    - libafflib-dev
+    - libewf-dev
+    - libpq-dev
+    - autopoint
+    - libsqlite3-dev
+    - ant
+    - libcppunit-dev
+    - wget
+    - openjdk-8-jdk
+    - openjfx=8u161-b12-1ubuntu2
+    - libopenjfx-java=8u161-b12-1ubuntu2
+    - libopenjfx-jni=8u161-b12-1ubuntu2
+  homebrew:
+    update: true
+    packages:
+    - ant
+    - libewf
+    - gettext
+    - cppunit
+    - afflib
+
 python:
   - "2.7"
+
 install:
-  - ./travis_build.sh
+  - ./travis_install_libs.sh
+
+before_script:
+  - if [ $TRAVIS_OS_NAME = linux ]; then
+        sudo update-alternatives --set java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java;
+        sudo update-alternatives --set javac /usr/lib/jvm/java-8-openjdk-amd64/bin/javac;
+        export PATH=/usr/bin:$PATH;
+        unset JAVA_HOME;
+    fi
+  - if [ $TRAVIS_OS_NAME = "osx" ]; then
+        export PATH=${PATH}:/usr/local/opt/gettext/bin;
+    fi
 
 script:
-  - make check && if [ -f "tests/test-suite.log" ];then cat tests/test-suite.log; fi ; if [ -f "unit_tests/base/test-suite.log" ];then cat unit_tests/base/test-suite.log; fi 
-  - if test ${TRAVIS_OS_NAME} = "linux"; then cd release; ./release-unix.pl ci; fi
+  - ./bootstrap && ./configure --prefix=/usr && make
+  - pushd bindings/java/ && ant -q dist-PostgreSQL && popd
+  - make check && if [ -f "tests/test-suite.log" ];then cat tests/test-suite.log; fi ; if [ -f "unit_tests/base/test-suite.log" ];then cat unit_tests/base/test-suite.log; fi
+  - if test ${TRAVIS_OS_NAME} = "linux"; then
+        pushd release && ./release-unix.pl ci && popd;
+    fi
diff --git a/Makefile.am b/Makefile.am
index d4ac019589940044053464d9a781c0910a67e255..da70b429a70a73ddb9dde076a62f8026647c6ba1 100644
--- a/Makefile.am
+++ b/Makefile.am
@@ -18,6 +18,7 @@ EXTRA_DIST = README_win32.txt README.md INSTALL.txt ChangeLog.txt NEWS.txt API-C
     bindings/java/src/org/sleuthkit/datamodel/*.java \
     bindings/java/src/org/sleuthkit/datamodel/*.html \
     bindings/java/src/org/sleuthkit/datamodel/*.properties \
+    bindings/java/src/org/sleuthkit/datamodel/blackboardutils/*.java \
     bindings/java/src/org/sleuthkit/datamodel/Examples/*.java \
     bindings/java/src/*.html \
     framework/*.txt \
diff --git a/NEWS.txt b/NEWS.txt
index e20c995e700e5bd2d3949d1074d87a587ae70998..a4a155747a6fec656a047581b3ac0d1baa0d1dcc 100644
--- a/NEWS.txt
+++ b/NEWS.txt
@@ -1,5 +1,31 @@
-Numbers refer to SourceForge.net tracker IDs:
-    http://sourceforge.net/tracker/?group_id=55685
+
+
+---------------- VERSION 4.7.0 --------------
+C/C++:
+- DB schema was expanded to store tsk_events and related tables.
+Time-based data is automatically added when files and artifacts are
+created.  Used by Autopsy timeline.
+- Logical Imager can save files as individual files instead of in
+VHD (saves space).
+- Logical imager produces log of results
+- Logical Imager refactor
+- Removed PRIuOFF and other macros that caused problems with
+signed/unsigned printing. For example, TSK_OFF_T is a signed value
+and PRIuOFF would cause problems as it printed a negative number
+as a big positive number.
+
+
+Java
+- Travis and Debian package use OpenJDK instead of OracleJDK
+- New Blackboard Helper packages (blackboardutils) to make it easier
+to make artifacts.
+- Blackboard scope was expanded, including the new postArtifact() method
+that adds event data to database and broadcasts an event to listeners.
+- SleuthkitCase now has an EventBus for database-related events.
+- New TimelineManager and associated filter classes to support new events 
+table
+
+
 
 ---------------- VERSION 4.6.7 --------------
 C/C++ Code:
diff --git a/bindings/java/build.xml b/bindings/java/build.xml
index 52615de6c7410bce09f210ea57162433e5bcbba3..124bc42d75acfa314c56531f28fce1f244c94266 100644
--- a/bindings/java/build.xml
+++ b/bindings/java/build.xml
@@ -11,7 +11,7 @@
 	<import file="build-${os.family}.xml"/>
 
     <!-- Careful changing this because release-windows.pl updates it by pattern -->
-<property name="VERSION" value="4.6.7"/>
+<property name="VERSION" value="4.7.0"/>
 
 	<!-- set global properties for this build -->
 	<property name="default-jar-location" location="/usr/share/java"/>
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Account.java b/bindings/java/src/org/sleuthkit/datamodel/Account.java
index 647e7397ed87c9928a3e4ccba5a6d41d81779148..bec66e453181c5f31af7afebf726adfb0ddb81c9 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Account.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/Account.java
@@ -54,6 +54,18 @@ public static final class Type {
 		public static final Account.Type WHATSAPP = new Type("WHATSAPP", "WhatsApp");
 		public static final Account.Type MESSAGING_APP = new Type("MESSAGING_APP", "MessagingApp");
 		public static final Account.Type WEBSITE = new Type("WEBSITE", "Website");
+		
+		public static final Account.Type IMO = new Type("IMO", "IMO");
+		public static final Account.Type LINE = new Type("LINE", "LINE");
+		public static final Account.Type SKYPE = new Type("SKYPE", "Skype");
+		public static final Account.Type TANGO = new Type("TANGO", "Tango");
+		public static final Account.Type TEXTNOW = new Type("TEXTNOW", "TextNow");
+		public static final Account.Type THREEMA = new Type("THREEMA", "ThreeMa");
+		public static final Account.Type VIBER = new Type("VIBER", "Viber");
+		
+		public static final Account.Type XENDER = new Type("XENDER", "Xender");
+		public static final Account.Type ZAPYA = new Type("ZAPYA", "Zapya");
+		public static final Account.Type SHAREIT = new Type("SHAREIT", "ShareIt");
 
 		public static final List<Account.Type> PREDEFINED_ACCOUNT_TYPES = Arrays.asList(
 				CREDIT_CARD,
@@ -65,7 +77,17 @@ public static final class Type {
 				INSTAGRAM,
 				WHATSAPP,
 				MESSAGING_APP,
-				WEBSITE
+				WEBSITE,
+				IMO,
+				LINE,
+				SKYPE,
+				TANGO,
+				TEXTNOW,
+				THREEMA,
+				VIBER,
+				XENDER,
+				ZAPYA,
+				SHAREIT
 		);
 
 		private final String typeName;
diff --git a/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java b/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java
index f61f59ab697ebf15f8f41cd3f55974f1ebd4fe6a..a0501eabd9373058e8497a11a0e575ca59b9acde 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/BlackboardAttribute.java
@@ -1326,7 +1326,35 @@ public enum ATTRIBUTE_TYPE {
 		
 		TSK_DATETIME_DELETED(133, "TSK_DATETIME_DELETED", //NON-NLS
 				bundle.getString("BlackboardAttribute.tskdatetimedeleted.text"),
-				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME);
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME),
+		
+		TSK_DATETIME_PASSWORD_RESET(134, "TSK_DATETIME_PASSWORD_RESET",
+				bundle.getString("BlackboardAttribute.tskdatetimepwdreset.text"),
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME),
+				
+		TSK_DATETIME_PASSWORD_FAIL(135, "TSK_DATETIME_PWD_FAIL",
+				bundle.getString("BlackboardAttribute.tskdatetimepwdfail.text"),
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.DATETIME),
+		
+		TSK_DISPLAY_NAME(136, "TSK_DISPLAY_NAME",
+				bundle.getString("BlackboardAttribute.tskdisplayname.text"),
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
+		
+		TSK_PASSWORD_SETTINGS(137, "TSK_PASSWORD_SETTINGS",
+				bundle.getString("BlackboardAttribute.tskpasswordsettings.text"),
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
+		
+		TSK_ACCOUNT_SETTINGS(138, "TSK_ACCOUNT_SETTINGS",
+				bundle.getString("BlackboardAttribute.tskaccountsettings.text"),
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
+		
+		TSK_PASSWORD_HINT(139, "TSK_PASSWORD_HINT", 
+			bundle.getString("BlackboardAttribute.tskpasswordhint.text"), 
+			TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING),
+		
+		TSK_GROUPS (140, "TSK_GROUPS", 
+				bundle.getString("BlackboardAttribute.tskgroups.text"),
+				TSK_BLACKBOARD_ATTRIBUTE_VALUE_TYPE.STRING);
 
 		private final int typeID;
 		private final String typeName;
diff --git a/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties b/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties
index ebddc29eee9eea5f834352dc04ebaff89cfc308d..9e0e01c75bca51cadc0cc318e21c1f73bd703e06 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties
+++ b/bindings/java/src/org/sleuthkit/datamodel/Bundle.properties
@@ -179,6 +179,13 @@ BlackboardAttribute.tskImsi.text=IMSI
 BlackboardAttribute.tskIccid.text=ICCID
 BlackboardAttribute.tskthreadid.text=Thread ID
 BlackboardAttribute.tskdatetimedeleted.text=Time Deleted
+BlackboardAttribute.tskdatetimepwdreset.text=Password Reset Date
+BlackboardAttribute.tskdatetimepwdfail.text=Password Fail Date
+BlackboardAttribute.tskdisplayname.text=Display Name
+BlackboardAttribute.tskpasswordsettings.text=Password Settings
+BlackboardAttribute.tskaccountsettings.text=Account Settings
+BlackboardAttribute.tskpasswordhint.text=Password Hint
+BlackboardAttribute.tskgroups.text=Groups
 AbstractFile.readLocal.exception.msg4.text=Error reading local file\: {0}
 AbstractFile.readLocal.exception.msg1.text=Error reading local file, local path is not set
 AbstractFile.readLocal.exception.msg2.text=Error reading local file, it does not exist at local path\: {0}
@@ -278,10 +285,9 @@ IngestModuleInfo.IngestModuleType.DataSourceLevel.displayName=Data Source Level
 ReviewStatus.Approved=Approved
 ReviewStatus.Rejected=Rejected
 ReviewStatus.Undecided=Undecided
-DescriptionLOD.short=Short
-DescriptionLOD.medium=Medium
-DescriptionLOD.full=Full
-
+TimelineLevelOfDetail.low=Low
+TimelineLevelOfDetail.medium=Medium
+TimelineLevelOfDetail.high=High
 BaseTypes.fileSystem.name=File System
 BaseTypes.webActivity.name=Web Activity
 BaseTypes.miscTypes.name=Misc Types
@@ -311,20 +317,17 @@ WebTypes.webFormAddress.name=Web Form Address
 CustomTypes.other.name=Other
 CustomTypes.userCreated.name=User Created
 BaseTypes.customTypes.name=Custom Types
-
-
-EventTypeZoomLevel.rootType=Root Type
-EventTypeZoomLevel.baseType=Base Type
-EventTypeZoomLevel.subType=Sub Type
-
+EventTypeHierarchyLevel.root=Root
+EventTypeHierarchyLevel.category=Category
+EventTypeHierarchyLevel.event=Event
 DataSourcesFilter.displayName.text=Data Source
 DescriptionFilter.mode.exclude=Exclude
 DescriptionFilter.mode.include=Include
-hashHitsFilter.displayName.text=Hash Sets
+hashHitsFilter.displayName.text=Hash Hits Only
 hideKnownFilter.displayName.text=Hide Known Files
 # {0} - sub filter displaynames
 IntersectionFilter.displayName.text=Intersection 
-tagsFilter.displayName.text=Tagged Items
+tagsFilter.displayName.text=Tagged Items Only
 TextFilter.displayName.text=Text Filter
 TypeFilter.displayName.text=Event Type
 FileTypesFilter.displayName.text=File Types
\ No newline at end of file
diff --git a/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java b/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java
index b511dcb3ae5d3dcea35693fc01a579e24dbbccfa..f468d6a5f63239c9195fdd6e51362bc2aec87833 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/CommunicationsManager.java
@@ -565,7 +565,7 @@ public org.sleuthkit.datamodel.Account.Type getAccountType(String accountTypeNam
 
 		try {
 			s = connection.createStatement();
-			rs = connection.executeQuery(s, "SELECT account_type_id, type_name, display_name, value_type FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
+			rs = connection.executeQuery(s, "SELECT account_type_id, type_name, display_name FROM account_types WHERE type_name = '" + accountTypeName + "'"); //NON-NLS
 			Account.Type accountType = null;
 			if (rs.next()) {
 				accountType = new Account.Type(accountTypeName, rs.getString("display_name"));
diff --git a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
index ef4f43d7103e6d2720e00de4e32e940e66736702..4b311acc633236e357d4898d8bf5c3d185e3e3e1 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/SleuthkitCase.java
@@ -75,6 +75,7 @@
 import org.sleuthkit.datamodel.TskData.FileKnown;
 import org.sleuthkit.datamodel.TskData.ObjectType;
 import org.sleuthkit.datamodel.TskData.TSK_DB_FILES_TYPE_ENUM;
+import org.sleuthkit.datamodel.TskData.TSK_FS_ATTR_TYPE_ENUM;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_FLAG_ENUM;
 import org.sleuthkit.datamodel.TskData.TSK_FS_META_TYPE_ENUM;
 import org.sleuthkit.datamodel.TskData.TSK_FS_NAME_FLAG_ENUM;
@@ -5556,7 +5557,9 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 			preparedStatement.setShort(2, (short) type.getValue());
 			preparedStatement.setLong(3, sectorSize);
 			preparedStatement.setString(4, timezone);
-			preparedStatement.setLong(5, size);
+			//prevent negative size
+			long savedSize = size < 0 ? 0 : size;
+			preparedStatement.setLong(5, savedSize);
 			preparedStatement.setString(6, md5);
 			preparedStatement.setString(7, sha1);
 			preparedStatement.setString(8, sha256);
@@ -5583,7 +5586,7 @@ public Image addImage(TskData.TSK_IMG_TYPE_ENUM type, long sectorSize, long size
 
 			// Create the new Image object
 			return new Image(this, newObjId, type.getValue(), deviceId, sectorSize, displayName,
-					imagePaths.toArray(new String[imagePaths.size()]), timezone, md5, sha1, sha256, size);
+					imagePaths.toArray(new String[imagePaths.size()]), timezone, md5, sha1, sha256, savedSize);
 		} catch (SQLException ex) {
 			if (!imagePaths.isEmpty()) {
 				throw new TskCoreException(String.format("Error adding image with path %s to database", imagePaths.get(0)), ex);
@@ -5743,6 +5746,117 @@ public FileSystem addFileSystem(long parentObjId, long imgOffset, TskData.TSK_FS
 		}
 	}
 
+	/**
+	 * Add a file system file.
+	 * 
+	 * @param dataSourceObjId	The object id of the root data source of this
+	 *							file.
+	 * @param fsObjId		The file system object id.
+	 * @param fileName		The name of the file.
+	 * @param metaAddr		The meta address of the file.
+	 * @param metaSeq		The meta address sequence of the file.
+	 * @param attrType		The attribute type of the file.
+	 * @param attrId		The attribute id 
+	 * @param dirFlag		The allocated status from the name structure
+	 * @param metaFlags		The meta flags of the file (allocated/used status).
+	 * @param size			The size of the file in bytes.
+	 * @param ctime			The changed time of the file.
+	 * @param crtime		The creation time of the file.
+	 * @param atime			The accessed time of the file
+	 * @param mtime			The modified time of the file.
+	 * @param isFile		True, unless the file is a directory.
+	 * @param parent		The parent of the file (e.g., a virtual directory)
+	 * 
+	 * @return Newly created file
+	 * 
+	 * @throws TskCoreException 
+	 */
+	public FsContent addFileSystemFile(long dataSourceObjId, long fsObjId, 
+										String fileName,
+										long metaAddr, int metaSeq,
+										TSK_FS_ATTR_TYPE_ENUM attrType, int attrId,
+										TSK_FS_NAME_FLAG_ENUM dirFlag, short metaFlags, long size, 
+										long ctime, long crtime, long atime, long mtime,
+										boolean isFile, Content parent) throws TskCoreException {
+		
+		CaseDbTransaction transaction = beginTransaction();
+		Statement queryStatement = null;
+		try {
+			CaseDbConnection connection = transaction.getConnection();
+			transaction.acquireSingleUserCaseWriteLock();
+
+			// Insert a row for the local/logical file into the tsk_objects table.
+			// INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)
+			long objectId = addObject(parent.getId(), TskData.ObjectType.ABSTRACTFILE.getObjectType(), connection);
+			
+			String parentPath;
+			
+			if (parent instanceof AbstractFile) {
+				AbstractFile parentFile = (AbstractFile) parent;
+				if (isRootDirectory(parentFile, transaction)) {
+					parentPath = "/";
+				} else {
+					parentPath = parentFile.getParentPath() + parent.getName() + "/"; //NON-NLS
+				}
+			} else {
+				parentPath = "/";
+			}
+	
+			PreparedStatement statement = connection.getPreparedStatement(PREPARED_STATEMENT.INSERT_FILE_SYSTEM_FILE);
+			statement.clearParameters();
+			statement.setLong(1, objectId);											// obj_id
+			statement.setLong(2, fsObjId);											// fs_obj_id 
+			statement.setLong(3, dataSourceObjId);									// data_source_obj_id 
+			statement.setShort(4, (short)attrType.getValue());						// attr_type
+			statement.setInt(5, attrId);											// attr_id
+			statement.setString(6, fileName);										// name
+			statement.setLong(7, metaAddr);											// meta_addr
+			statement.setInt(8, metaSeq);											// meta_seq
+			statement.setShort(9, TskData.TSK_DB_FILES_TYPE_ENUM.FS.getFileType());	//type
+			statement.setShort(10, (short) 1);										// has_path
+			TSK_FS_NAME_TYPE_ENUM dirType = isFile ? TSK_FS_NAME_TYPE_ENUM.REG : TSK_FS_NAME_TYPE_ENUM.DIR;
+			statement.setShort(11, dirType.getValue());								// dir_type
+			TSK_FS_META_TYPE_ENUM metaType = isFile ? TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG : TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_DIR;
+			statement.setShort(12, metaType.getValue());							// meta_type
+			statement.setShort(13, dirFlag.getValue());								// dir_flags
+			statement.setShort(14, metaFlags);										// meta_flags
+			statement.setLong(15,  size < 0 ? 0 : size);
+			statement.setLong(16, ctime);
+			statement.setLong(17, crtime);
+			statement.setLong(18, atime);
+			statement.setLong(19, mtime);
+			statement.setString(20, parentPath);
+			final String extension = extractExtension(fileName);
+			statement.setString(21, extension);
+
+			connection.executeUpdate(statement);
+			
+			transaction.commit();
+			transaction = null;
+			
+			return new org.sleuthkit.datamodel.File(this, objectId, dataSourceObjId, fsObjId,
+			attrType, attrId, fileName, metaAddr, metaSeq,
+			dirType, metaType, dirFlag, metaFlags,
+			size, ctime, crtime, atime, mtime,
+			(short)0, 0, 0, null, null, parentPath, null,
+			extension);
+			
+		} catch(SQLException ex) {
+			logger.log(Level.WARNING, "Failed to add file system file", ex);
+		} 
+		finally {
+			closeStatement(queryStatement);
+			if (null != transaction) {
+				try {
+					transaction.rollback();
+				} catch (TskCoreException ex2) {
+					logger.log(Level.SEVERE, "Failed to rollback transaction after exception", ex2);
+				}
+			}
+		}
+		return null;
+	}
+
 	/**
 	 * Get IDs of the virtual folder roots (at the same level as image), used
 	 * for containers such as for local files.
@@ -6187,7 +6301,9 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 			statement.setShort(9, metaFlags);
 
 			//size
-			statement.setLong(10, size);
+			//prevent negative size
+			long savedSize = size < 0 ? 0 : size;
+			statement.setLong(10, savedSize);
 
 			//mactimes
 			//long ctime, long crtime, long atime, long mtime,
@@ -6216,9 +6332,9 @@ public DerivedFile addDerivedFile(String fileName, String localPath,
 			addFilePath(connection, newObjId, localPath, encodingType);
 
 			DerivedFile derivedFile = new DerivedFile(this, newObjId, dataSourceObjId, fileName, dirType, metaType, dirFlag, metaFlags,
-					size, ctime, crtime, atime, mtime, null, null, parentPath, localPath, parentId, null, encodingType, extension);
+					savedSize, ctime, crtime, atime, mtime, null, null, parentPath, localPath, parentId, null, encodingType, extension);
 
-			timelineManager.addAbstractFileEvents(derivedFile, connection);
+			timelineManager.addEventsForNewFile(derivedFile, connection);
 			transaction.commit();
 			//TODO add derived method to tsk_files_derived and tsk_files_derived_method
 			return derivedFile;
@@ -6305,7 +6421,9 @@ public DerivedFile updateDerivedFile(DerivedFile derivedFile, String localPath,
 			statement.setShort(5, metaFlags);
 
 			//size
-			statement.setLong(6, size);
+			//prevent negative size
+			long savedSize = size < 0 ? 0 : size;
+			statement.setLong(6, savedSize);
 
 			//mactimes
 			//long ctime, long crtime, long atime, long mtime,
@@ -6325,7 +6443,7 @@ public DerivedFile updateDerivedFile(DerivedFile derivedFile, String localPath,
 			long dataSourceObjId = getDataSourceObjectId(connection, parentId);
 			final String extension = extractExtension(derivedFile.getName());
 			return new DerivedFile(this, derivedFile.getId(), dataSourceObjId, derivedFile.getName(), dirType, metaType, dirFlag, metaFlags,
-					size, ctime, crtime, atime, mtime, null, null, parentPath, localPath, parentId, null, encodingType, extension);
+					savedSize, ctime, crtime, atime, mtime, null, null, parentPath, localPath, parentId, null, encodingType, extension);
 		} catch (SQLException ex) {
 			connection.rollbackTransaction();
 			throw new TskCoreException("Failed to add derived file to case database", ex);
@@ -6474,7 +6592,9 @@ public LocalFile addLocalFile(String fileName, String localPath,
 			statement.setShort(8, dirFlag.getValue());
 			short metaFlags = (short) (TSK_FS_META_FLAG_ENUM.ALLOC.getValue() | TSK_FS_META_FLAG_ENUM.USED.getValue());
 			statement.setShort(9, metaFlags);
-			statement.setLong(10, size);
+			//prevent negative size
+			long savedSize = size < 0 ? 0 : size;
+			statement.setLong(10, savedSize);
 			statement.setLong(11, ctime);
 			statement.setLong(12, crtime);
 			statement.setLong(13, atime);
@@ -6516,14 +6636,14 @@ public LocalFile addLocalFile(String fileName, String localPath,
 					metaType,
 					dirFlag,
 					metaFlags,
-					size,
+					savedSize,
 					ctime, crtime, atime, mtime,
 					mimeType, md5, known,
 					parent.getId(), parentPath,
 					dataSourceObjId,
 					localPath,
 					encodingType, extension);
-			getTimelineManager().addAbstractFileEvents(localFile, connection);
+			getTimelineManager().addEventsForNewFile(localFile, connection);
 			return localFile;
 
 		} catch (SQLException ex) {
@@ -6663,7 +6783,9 @@ public LayoutFile addLayoutFile(String fileName,
 			prepStmt.setShort(7, TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG.getValue()); // meta_type
 			prepStmt.setShort(8, dirFlag.getValue()); // dir_flags
 			prepStmt.setShort(9, metaFlag.getValue()); // meta_flags
-			prepStmt.setLong(10, size);   // size
+			//prevent negative size
+			long savedSize = size < 0 ? 0 : size;
+			prepStmt.setLong(10, savedSize);   // size
 			prepStmt.setLong(11, ctime);  // ctime
 			prepStmt.setLong(12, crtime); // crtime
 			prepStmt.setLong(13, atime);  // atime
@@ -6704,7 +6826,7 @@ public LayoutFile addLayoutFile(String fileName,
 					TSK_FS_META_TYPE_ENUM.TSK_FS_META_TYPE_REG,
 					dirFlag,
 					metaFlag.getValue(),
-					size,
+					savedSize,
 					ctime, crtime, atime, mtime,
 					null,
 					FileKnown.UNKNOWN,
@@ -10487,6 +10609,8 @@ private enum PREPARED_STATEMENT {
 		INSERT_OBJECT("INSERT INTO tsk_objects (par_obj_id, type) VALUES (?, ?)"), //NON-NLS
 		INSERT_FILE("INSERT INTO tsk_files (obj_id, fs_obj_id, name, type, has_path, dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, atime, mtime, md5, known, mime_type, parent_path, data_source_obj_id,extension) " //NON-NLS
 				+ "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"), //NON-NLS
+		INSERT_FILE_SYSTEM_FILE("INSERT INTO tsk_files(obj_id, fs_obj_id, data_source_obj_id, attr_type, attr_id, name, meta_addr, meta_seq, type, has_path, dir_type, meta_type, dir_flags, meta_flags, size, ctime, crtime, atime, mtime, parent_path, extension)"
+				+ " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"), // NON-NLS
 		UPDATE_DERIVED_FILE("UPDATE tsk_files SET type = ?, dir_type = ?, meta_type = ?, dir_flags = ?,  meta_flags = ?, size= ?, ctime= ?, crtime= ?, atime= ?, mtime= ?, mime_type = ?  "
 				+ "WHERE obj_id = ?"), //NON-NLS
 		INSERT_LAYOUT_FILE("INSERT INTO tsk_file_layout (obj_id, byte_start, byte_len, sequence) " //NON-NLS
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineEvent.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineEvent.java
index 2bb28a562df8252a36a7005aff854deba063648e..23e5f376e124b270d039e157eafd60026e4a9bbf 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineEvent.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineEvent.java
@@ -19,215 +19,233 @@
 package org.sleuthkit.datamodel;
 
 import java.util.Optional;
-import java.util.ResourceBundle;
-import static org.sleuthkit.datamodel.TimelineEventType.TypeLevel.SUB_TYPE;
 
 /**
- * A single event.
+ * A representation of an event in the timeline of a case.
  */
 public final class TimelineEvent {
 
+	/**
+	 * The unique ID of this event in the case database.
+	 */
 	private final long eventID;
+
 	/**
-	 * The TSK object ID of the file this event is derived from.
+	 * The object ID of the content that is either the direct or indirect source
+	 * of this event. For events associated with files, this will be the object
+	 * ID of the file. For events associated with artifacts, this will be the
+	 * object ID of the artifact source: a file, a data source, or another
+	 * artifact.
 	 */
-	private final long fileObjID;
+	private final long contentObjID;
 
 	/**
-	 * The TSK artifact ID of the file this event is derived from. Null, if this
-	 * event is not derived from an artifact.
+	 * The artifact ID (not the object ID) of the artifact, if any, that is the
+	 * source of this event. Null for events associated directly with files.
 	 */
 	private final Long artifactID;
 
 	/**
-	 * The TSK datasource ID of the datasource this event belongs to.
+	 * The object ID of the data source for the event source.
 	 */
 	private final long dataSourceObjID;
 
 	/**
-	 * The time of this event in second from the Unix epoch.
+	 * When this event occurred, in seconds from the UNIX epoch.
 	 */
 	private final long time;
+
 	/**
 	 * The type of this event.
 	 */
 	private final TimelineEventType type;
 
 	/**
-	 * The three descriptions (full, med, short) stored in a map, keyed by
- DescriptionLOD (TypeLevel of Detail)
+	 * The description of this event, provided at three levels of detail: high
+	 * (full description), medium (medium description), and low (short
+	 * description).
 	 */
 	private final TimelineEventDescription descriptions;
 
 	/**
-	 * True if the file this event is derived from hits any of the configured
-	 * hash sets.
+	 * True if the file, if any, associated with this event, either directly or
+	 * indirectly, is a file for which a hash set hit has been detected.
 	 */
-	private final boolean hashHit;
+	private final boolean eventSourceHashHitDetected;
 
 	/**
-	 * True if the file or artifact this event is derived from is tagged.
+	 * True if the direct source (file or artifact) of this event has been
+	 * tagged.
 	 */
-	private final boolean tagged;
+	private final boolean eventSourceTagged;
 
 	/**
+	 * Constructs a representation of an event in the timeline of a case.
 	 *
-	 * @param eventID         ID from tsk_events table in database
-	 * @param dataSourceObjID Object Id for data source event is from
-	 * @param fileObjID       object id for non-artifact content that event is
-	 *                        associated with
-	 * @param artifactID      ID of artifact (not object id) if event came from
-	 *                        an artifact
-	 * @param time
-	 * @param type
-	 * @param descriptions
-	 * @param hashHit
-	 * @param tagged
+	 * @param eventID                    The unique ID of this event in the case
+	 *                                   database.
+	 * @param dataSourceObjID            The object ID of the data source for
+	 *                                   the event source.
+	 * @param contentObjID               The object ID of the content that is
+	 *                                   either the direct or indirect source of
+	 *                                   this event. For events associated with
+	 *                                   files, this will be the object ID of
+	 *                                   the file. For events associated with
+	 *                                   artifacts, this will be the object ID
+	 *                                   of the artifact source: a file, a data
+	 *                                   source, or another artifact.
+	 * @param artifactID                 The artifact ID (not the object ID) of
+	 *                                   the artifact, if any, that is the
+	 *                                   source of this event. Null for events
+	 *                                   associated directly with files.
+	 * @param time                       The time this event occurred, in
+	 *                                   seconds from the UNIX epoch.
+	 * @param type                       The type of this event.
+	 * @param fullDescription            The full length description of this
+	 *                                   event.
+	 * @param medDescription             The medium length description of this
+	 *                                   event.
+	 * @param shortDescription           The short length description of this
+	 *                                   event.
+	 * @param eventSourceHashHitDetected True if the file, if any, associated
+	 *                                   with this event, either directly or
+	 *                                   indirectly, is a file for which a hash
+	 *                                   set hit has been detected.
+	 * @param eventSourceTagged          True if the direct source (file or
+	 *                                   artifact) of this event has been
+	 *                                   tagged.
 	 */
-	TimelineEvent(long eventID, long dataSourceObjID, long fileObjID, Long artifactID,
-			long time, TimelineEventType type,
+	TimelineEvent(long eventID,
+			long dataSourceObjID,
+			long contentObjID,
+			Long artifactID,
+			long time,
+			TimelineEventType type,
 			String fullDescription,
 			String medDescription,
 			String shortDescription,
-			boolean hashHit, boolean tagged) {
+			boolean eventSourceHashHitDetected,
+			boolean eventSourceTagged) {
 		this.eventID = eventID;
 		this.dataSourceObjID = dataSourceObjID;
-		this.fileObjID = fileObjID;
+		this.contentObjID = contentObjID;
 		this.artifactID = Long.valueOf(0).equals(artifactID) ? null : artifactID;
 		this.time = time;
 		this.type = type;
-		// This isn't the best design, but it was the most expediant way to reduce 
-		// the public API (by keeping parseDescription()) out of the public API.  
+		/*
+		 * The cast that follows reflects the fact that we have not decided
+		 * whether or not to add the parseDescription method to the
+		 * TimelineEventType interface yet. Currently (9/18/19), this method is
+		 * part of TimelineEventTypeImpl and all implementations of
+		 * TimelineEventType are subclasses of TimelineEventTypeImpl.
+		 */
 		if (type instanceof TimelineEventTypeImpl) {
-			this.descriptions = ((TimelineEventTypeImpl)type).parseDescription(fullDescription, medDescription, shortDescription);
-		}
-		else {
-			throw new IllegalArgumentException();
+			this.descriptions = ((TimelineEventTypeImpl) type).parseDescription(fullDescription, medDescription, shortDescription);
+		} else {
+			this.descriptions = new TimelineEventDescription(fullDescription, medDescription, shortDescription);
 		}
-		this.hashHit = hashHit;
-		this.tagged = tagged;
+		this.eventSourceHashHitDetected = eventSourceHashHitDetected;
+		this.eventSourceTagged = eventSourceTagged;
 	}
 
 	/**
-	 * Is the file or artifact this event is derived from tagged?
+	 * Indicates whether or not the direct source (file or artifact) of this
+	 * event has been tagged.
 	 *
-	 * @return true if he file or artifact this event is derived from is tagged.
+	 * @return True or false.
 	 */
-	public boolean isTagged() {
-		return tagged;
+	public boolean eventSourceIsTagged() {
+		return eventSourceTagged;
 	}
 
 	/**
-	 * Is the file this event is derived from in any of the configured hash
-	 * sets.
-	 *
+	 * Indicates whether or not the file, if any, associated with this event,
+	 * either directly or indirectly, is a file for which a hash set hit has
+	 * been detected.
 	 *
-	 * @return True if the file this event is derived from is in any of the
-	 *         configured hash sets.
+	 * @return True or false.
 	 */
-	public boolean isHashHit() {
-		return hashHit;
+	public boolean eventSourceHasHashHits() {
+		return eventSourceHashHitDetected;
 	}
 
 	/**
-	 * Get the artifact id (not the object ID) of the artifact this event is
-	 * derived from.
+	 * Gets the artifact ID (not object ID) of the artifact, if any, that is the
+	 * direct source of this event.
 	 *
-	 * @return An Optional containing the artifact ID. Will be empty if this
-	 *         event is not derived from an artifact
+	 * @return An Optional object containing the artifact ID. May be empty.
 	 */
 	public Optional<Long> getArtifactID() {
 		return Optional.ofNullable(artifactID);
 	}
 
 	/**
-	 * Get the event id of this event.
+	 * Gets the unique ID of this event in the case database.
 	 *
-	 * @return The event id of this event.
+	 * @return The event ID.
 	 */
 	public long getEventID() {
 		return eventID;
 	}
 
 	/**
-	 * Get the Content obj id of the "file" (which could be a data source or
-	 * other non AbstractFile ContentS) this event is derived from.
+	 * Gets the object ID of the content that is the direct or indirect source
+	 * of this event. For events associated with files, this will be the object
+	 * ID of the file that is the direct event source. For events associated
+	 * with artifacts, this will be the object ID of the artifact source: a
+	 * file, a data source, or another artifact.
 	 *
-	 * @return the object id.
+	 * @return The object ID.
 	 */
-	public long getFileObjID() {
-		return fileObjID;
+	public long getContentObjID() {
+		return contentObjID;
 	}
 
 	/**
-	 * Get the time of this event (in seconds from the Unix epoch).
+	 * Gets the time this event occurred.
 	 *
-	 * @return the time of this event in seconds from Unix epoch
+	 * @return The time this event occurred, in seconds from UNIX epoch.
 	 */
 	public long getTime() {
 		return time;
 	}
 
-	public TimelineEventType getEventType() {
-		return type;
-	}
-
-	public TimelineEventType getEventType(TimelineEventType.TypeLevel zoomLevel) {
-		return zoomLevel.equals(SUB_TYPE) ? type : type.getBaseType();
-	}
-
-	/**
-	 * Get the full description of this event.
-	 *
-	 * @return the full description
-	 */
-	public String getFullDescription() {
-		return getDescription(TimelineEvent.DescriptionLevel.FULL);
-	}
-
-	/**
-	 * Get the medium description of this event.
-	 *
-	 * @return the medium description
-	 */
-	public String getMedDescription() {
-		return getDescription(TimelineEvent.DescriptionLevel.MEDIUM);
-	}
-
 	/**
-	 * Get the short description of this event.
+	 * Gets the type of this event.
 	 *
-	 * @return the short description
+	 * @return The event type.
 	 */
-	public String getShortDescription() {
-		return getDescription(TimelineEvent.DescriptionLevel.SHORT);
+	public TimelineEventType getEventType() {
+		return type;
 	}
 
 	/**
-	 * Get the description of this event at the give level of detail(LoD).
+	 * Gets the description of this event at a given level of detail.
 	 *
-	 * @param lod The level of detail to get.
+	 * @param levelOfDetail The desired level of detail.
 	 *
 	 * @return The description of this event at the given level of detail.
 	 */
-	public String getDescription(TimelineEvent.DescriptionLevel lod) {
-		return descriptions.getDescription(lod);
+	public String getDescription(TimelineLevelOfDetail levelOfDetail) {
+		return descriptions.getDescription(levelOfDetail);
 	}
 
 	/**
-	 * Get the datasource id of the datasource this event belongs to.
+	 * Gets the object ID of the data source for the source content of this
+	 * event.
 	 *
-	 * @return the datasource id.
+	 * @return The data source object ID.
 	 */
 	public long getDataSourceObjID() {
 		return dataSourceObjID;
 	}
 
-	public long getEndMillis() {
-		return time * 1000;
-	}
-
-	public long getStartMillis() {
+	/**
+	 * Gets the time this event occurred, in milliseconds from the UNIX epoch.
+	 *
+	 * @return The event time in milliseconds from the UNIX epoch.
+	 */
+	public long getEventTimeInMs() {
 		return time * 1000;
 	}
 
@@ -247,42 +265,7 @@ public boolean equals(Object obj) {
 			return false;
 		}
 		final TimelineEvent other = (TimelineEvent) obj;
-		return this.eventID == other.eventID;
+		return this.eventID == other.getEventID();
 	}
 
-	/**
-	 * Defines the zoom levels that are available for the event description
-	 */
-	public enum DescriptionLevel {
-		SHORT(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("DescriptionLOD.short")),
-		MEDIUM(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("DescriptionLOD.medium")),
-		FULL(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("DescriptionLOD.full"));
-
-		private final String displayName;
-
-		public String getDisplayName() {
-			return displayName;
-		}
-
-		private DescriptionLevel(String displayName) {
-			this.displayName = displayName;
-		}
-
-		public DescriptionLevel moreDetailed() {
-			try {
-				return values()[ordinal() + 1];
-			} catch (ArrayIndexOutOfBoundsException e) {
-				return null;
-			}
-		}
-
-		public DescriptionLevel lessDetailed() {
-			try {
-				return values()[ordinal() - 1];
-			} catch (ArrayIndexOutOfBoundsException e) {
-				return null;
-			}
-		}
-
-	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeImpl.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeImpl.java
index bee1ca8845cca7ee63e1849c06ba9c1439f2ac1a..0f35b1c2cbf17e49faab8eddfd68a9b03c375d2c 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeImpl.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventArtifactTypeImpl.java
@@ -29,16 +29,23 @@
 /**
  * Version of TimelineEventType for events based on artifacts
  */
-class TimelineEventArtifactTypeImpl extends TimelineEventTypeImpl { 
+class TimelineEventArtifactTypeImpl extends TimelineEventTypeImpl {
 
 	private static final Logger logger = Logger.getLogger(TimelineEventArtifactTypeImpl.class.getName());
-
+	
+	static final int EMAIL_FULL_DESCRIPTION_LENGTH_MAX = 150;
+	static final int EMAIL_TO_FROM_LENGTH_MAX = 75;
+	
 	private final BlackboardArtifact.Type artifactType;
 	private final BlackboardAttribute.Type dateTimeAttributeType;
 	private final TSKCoreCheckedFunction<BlackboardArtifact, String> fullExtractor;
 	private final TSKCoreCheckedFunction<BlackboardArtifact, String> medExtractor;
 	private final TSKCoreCheckedFunction<BlackboardArtifact, String> shortExtractor;
 	private final TSKCoreCheckedFunction<BlackboardArtifact, TimelineEventDescriptionWithTime> artifactParsingFunction;
+	
+	private static final int MAX_SHORT_DESCRIPTION_LENGTH = 500;
+	private static final int MAX_MED_DESCRIPTION_LENGTH = 500;
+	private static final int MAX_FULL_DESCRIPTION_LENGTH = 1024;
 
 	TimelineEventArtifactTypeImpl(int typeID, String displayName,
 			TimelineEventType superType,
@@ -59,7 +66,7 @@ class TimelineEventArtifactTypeImpl extends TimelineEventTypeImpl {
 			TSKCoreCheckedFunction<BlackboardArtifact, String> fullExtractor,
 			TSKCoreCheckedFunction<BlackboardArtifact, TimelineEventDescriptionWithTime> eventPayloadFunction) {
 
-		super(typeID, displayName, TimelineEventType.TypeLevel.SUB_TYPE, superType);
+		super(typeID, displayName, TimelineEventType.HierarchyLevel.EVENT, superType);
 		this.artifactType = artifactType;
 		this.dateTimeAttributeType = dateTimeAttributeType;
 		this.shortExtractor = shortExtractor;
@@ -102,13 +109,14 @@ BlackboardArtifact.Type getArtifactType() {
 		return artifactType;
 	}
 
-	
 	/**
 	 * Parses the artifact to create a triple description with a time.
-	 * 
+	 *
 	 * @param artifact
+	 *
 	 * @return
-	 * @throws TskCoreException 
+	 *
+	 * @throws TskCoreException
 	 */
 	TimelineEventDescriptionWithTime makeEventDescription(BlackboardArtifact artifact) throws TskCoreException {
 		//if we got passed an artifact that doesn't correspond to this event type, 
@@ -122,7 +130,9 @@ TimelineEventDescriptionWithTime makeEventDescription(BlackboardArtifact artifac
 			return null;
 		}
 
-		/* Use the type-specific method */
+		/*
+		 * Use the type-specific method
+		 */
 		if (this.artifactParsingFunction != null) {
 			//use the hook provided by this subtype implementation to build the descriptions.
 			return this.artifactParsingFunction.apply(artifact);
@@ -130,8 +140,20 @@ TimelineEventDescriptionWithTime makeEventDescription(BlackboardArtifact artifac
 
 		//combine descriptions in standard way
 		String shortDescription = extractShortDescription(artifact);
+		if (shortDescription.length() > MAX_SHORT_DESCRIPTION_LENGTH) {
+			shortDescription = shortDescription.substring(0, MAX_SHORT_DESCRIPTION_LENGTH);
+		}
+
 		String medDescription = shortDescription + " : " + extractMedDescription(artifact);
+		if (medDescription.length() > MAX_MED_DESCRIPTION_LENGTH) {
+			medDescription = medDescription.substring(0, MAX_MED_DESCRIPTION_LENGTH);
+		}
+
 		String fullDescription = medDescription + " : " + extractFullDescription(artifact);
+		if (fullDescription.length() > MAX_FULL_DESCRIPTION_LENGTH) {
+			fullDescription = fullDescription.substring(0, MAX_FULL_DESCRIPTION_LENGTH);
+		}
+		
 		return new TimelineEventDescriptionWithTime(timeAttribute.getValueLong(), shortDescription, medDescription, fullDescription);
 	}
 
@@ -204,6 +226,7 @@ public String apply(BlackboardArtifact artf) throws TskCoreException {
 	 */
 	@FunctionalInterface
 	interface TSKCoreCheckedFunction<I, O> {
+
 		O apply(I input) throws TskCoreException;
 	}
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventDescription.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventDescription.java
index 1a58ee2c9ffa61720004b47d3f523a0b4181553d..0adf594c74edc009f4e0a5e10b67811250eb3c2c 100755
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventDescription.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventDescription.java
@@ -19,21 +19,40 @@
 package org.sleuthkit.datamodel;
 
 /**
- * Encapsulates the potential multiple levels of description for an event in to
- * one object. Currently used for interim storage.
+ * A container for a timeline event description with potentially varying levels
+ * of detail.
  */
 class TimelineEventDescription {
 
-	String shortDesc;
-	String mediumDesc;
-	String fullDesc;
+	private final String shortDesc;
+	private final String mediumDesc;
+	private final String fullDesc;
 
+	/**
+	 * Constructs a container for a timeline event description that varies with
+	 * each of three levels of detail.
+	 *
+	 * @param fullDescription  The full length description of an event for use
+	 *                         at a high level of detail.
+	 * @param medDescription   The medium length description of an event for use
+	 *                         at a medium level of detail.
+	 * @param shortDescription The short length description of an event for use
+	 *                         at a low level of detail.
+	 */
 	TimelineEventDescription(String fullDescription, String medDescription, String shortDescription) {
 		this.shortDesc = shortDescription;
 		this.mediumDesc = medDescription;
 		this.fullDesc = fullDescription;
 	}
 
+	/**
+	 * Constructs a container for a timeline event description for the high
+	 * level of detail. The descriptions for the low and medium levels of detail
+	 * will be the empty string.
+	 *
+	 * @param fullDescription The full length description of an event for use at
+	 *                        a high level of detail.
+	 */
 	TimelineEventDescription(String fullDescription) {
 		this.shortDesc = "";
 		this.mediumDesc = "";
@@ -41,48 +60,22 @@ class TimelineEventDescription {
 	}
 
 	/**
-	 * Get the full description of this event.
-	 *
-	 * @return the full description
-	 */
-	String getFullDescription() {
-		return fullDesc;
-	}
-
-	/**
-	 * Get the medium description of this event.
-	 *
-	 * @return the medium description
-	 */
-	String getMediumDescription() {
-		return mediumDesc;
-	}
-
-	/**
-	 * Get the short description of this event.
-	 *
-	 * @return the short description
-	 */
-	String getShortDescription() {
-		return shortDesc;
-	}
-
-	/**
-	 * Get the description of this event at the give level of detail(LoD).
+	 * Gets the description of this event at the given level of detail.
 	 *
-	 * @param lod The level of detail to get.
+	 * @param levelOfDetail The level of detail.
 	 *
-	 * @return The description of this event at the given level of detail.
+	 * @return The event description at the given level of detail.
 	 */
-	String getDescription(TimelineEvent.DescriptionLevel lod) {
-		switch (lod) {
-			case FULL:
+	String getDescription(TimelineLevelOfDetail levelOfDetail) {
+		switch (levelOfDetail) {
+			case HIGH:
 			default:
-				return getFullDescription();
+				return this.fullDesc;
 			case MEDIUM:
-				return getMediumDescription();
-			case SHORT:
-				return getShortDescription();
+				return this.mediumDesc;
+			case LOW:
+				return this.shortDesc;
 		}
 	}
+	
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java
index 4d1563b7957a805defa02c5824541ce57353a9d9..3eff0b5afdbcf78583ca1120921a063d501a4da2 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventType.java
@@ -18,6 +18,7 @@
  */
 package org.sleuthkit.datamodel;
 
+import com.google.common.annotations.Beta;
 import com.google.common.base.MoreObjects;
 import com.google.common.collect.ImmutableSortedSet;
 import java.util.Arrays;
@@ -37,95 +38,140 @@
 import static org.sleuthkit.datamodel.TimelineEventArtifactTypeImpl.getAttributeSafe;
 
 /**
- * Interface for distinct kinds of events (ie file system or web
- * activity) in a hierarchy. An TimelineEventType may have an optional 
- super-type and 0 or more subtypes.   NOTE: this is not currently
- extensible by modules. The structure is hard coded to a certain
- number of levels and types. 
+ * An interface implemented by timeline event types. Timeline event types are
+ * organized into a type hierarchy. This type hierarchy has three levels: the
+ * root level, the category level (e.g., file system events, web activity
+ * events), and the actual event level (e.g., file modified events, web download
+ * events).
+ *
+ * Currently (9/20/19), all supported timeline event types are defined as
+ * members of this interface.
+ *
+ * WARNING: THIS INTERFACE IS A "BETA" INTERFACE AND IS SUBJECT TO CHANGE AT ANY
+ * TIME.
  */
+@Beta
 public interface TimelineEventType extends Comparable<TimelineEventType> {
-	
-	static final int EMAIL_FULL_DESCRIPTION_LENGTH_MAX = 150;
 
+	/**
+	 * Gets the display name of this event type.
+	 *
+	 * @return The event type display name.
+	 */
 	String getDisplayName();
 
 	/**
-	 * 
-	 * @return Unique type iD (from database)
+	 * Gets the unique ID of this event type in the case database.
+	 *
+	 * @return The event type ID.
 	 */
 	long getTypeID();
 
 	/**
-	 * 
-	 * @return The level that this event is in the type hierarchy.
+	 * Gets the type hierarchy level of this event type.
+	 *
+	 * @return The type hierarchy level.
 	 */
-	TimelineEventType.TypeLevel getTypeLevel();
+	TimelineEventType.HierarchyLevel getTypeHierarchyLevel();
 
 	/**
-	 * @return A list of TimelineEventTypes, one for each subtype of this EventTYpe, or
-         an empty set if this TimelineEventType has no subtypes.
+	 * Gets the child event types of this event type in the type hierarchy.
+	 *
+	 * @return A sorted set of the child event types.
 	 */
-	SortedSet<? extends TimelineEventType> getSubTypes();
-
-	Optional<? extends TimelineEventType> getSubType(String string);
-
+	SortedSet<? extends TimelineEventType> getChildren();
 
 	/**
-	 * @return the super type of this event
+	 * Gets a specific child event type of this event type in the type
+	 * hierarchy.
+	 *
+	 * @param displayName The display name of the desired child event type.
+	 *
+	 * @return The child event type in an Optional object, may be empty.
 	 */
-	TimelineEventType getSuperType();
+	Optional<? extends TimelineEventType> getChild(String displayName);
 
-	default TimelineEventType getBaseType() {
-		TimelineEventType superType = getSuperType();
+	/**
+	 * Gets the parent event type of this event type in the type hierarchy.
+	 *
+	 * @return The parent event type.
+	 */
+	TimelineEventType getParent();
 
-		return superType.equals(ROOT_EVENT_TYPE)
+	/**
+	 * Gets the category level event type for this event type in the type
+	 * hierarchy.
+	 *
+	 * @return The category event type.
+	 */
+	default TimelineEventType getCategory() {
+		TimelineEventType parentType = getParent();
+		return parentType.equals(ROOT_EVENT_TYPE)
 				? this
-				: superType.getBaseType();
-
+				: parentType.getCategory();
 	}
 
-	default SortedSet<? extends TimelineEventType> getSiblingTypes() {
+	/**
+	 * Gets the sibling event types of this event type in the type hierarchy.
+	 *
+	 * @return The sibling event types.
+	 */
+	default SortedSet<? extends TimelineEventType> getSiblings() {
 		return this.equals(ROOT_EVENT_TYPE)
 				? ImmutableSortedSet.of(ROOT_EVENT_TYPE)
-				: this.getSuperType().getSubTypes();
-
+				: this.getParent().getChildren();
 	}
 
 	@Override
 	default int compareTo(TimelineEventType otherType) {
 		return Comparator.comparing(TimelineEventType::getTypeID).compare(this, otherType);
 	}
-	
+
 	/**
-	 * Enum of event type zoom levels.
+	 * An enumeration of the levels in the event type hierarchy.
 	 */
-	public enum TypeLevel {
+	public enum HierarchyLevel {
+
 		/**
-		 * The root event type zoom level. All event are the same type at this
-		 * level.
+		 * The root level of the event types hierarchy.
 		 */
-		ROOT_TYPE(getBundle().getString("EventTypeZoomLevel.rootType")),
+		ROOT(getBundle().getString("EventTypeHierarchyLevel.root")),
 		/**
-		 * The zoom level of base event types like files system, and web activity
+		 * The category level of the event types hierarchy. Event types at this
+		 * level represent event categories such as file system events and web
+		 * activity events.
 		 */
-		BASE_TYPE(getBundle().getString("EventTypeZoomLevel.baseType")),
+		CATEGORY(getBundle().getString("EventTypeHierarchyLevel.category")),
 		/**
-		 * The zoom level of specific type such as file modified time, or web
-		 * download.
+		 * The actual events level of the event types hierarchy. Event types at
+		 * this level represent actual events such as file modified time events
+		 * and web download events.
 		 */
-		SUB_TYPE(getBundle().getString("EventTypeZoomLevel.subType"));
+		EVENT(getBundle().getString("EventTypeHierarchyLevel.event"));
 
 		private final String displayName;
 
+		/**
+		 * Gets the display name of this element of the enumeration of the
+		 * levels in the event type hierarchy.
+		 *
+		 * @return The display name.
+		 */
 		public String getDisplayName() {
 			return displayName;
 		}
 
-		private TypeLevel(String displayName) {
+		/**
+		 * Constructs an element of the enumeration of the levels in the event
+		 * type hierarchy.
+		 *
+		 * @param displayName The display name of this hierarchy level.
+		 */
+		private HierarchyLevel(String displayName) {
 			this.displayName = displayName;
 		}
-	}
 
+	}
 
 	/**
 	 * The root type of all event types. No event should actually have this
@@ -133,36 +179,38 @@ private TypeLevel(String displayName) {
 	 */
 	TimelineEventType ROOT_EVENT_TYPE = new TimelineEventTypeImpl(0,
 			getBundle().getString("RootEventType.eventTypes.name"), // NON-NLS
-			TypeLevel.ROOT_TYPE, null) {
+			HierarchyLevel.ROOT, null) {
 		@Override
-		public SortedSet< TimelineEventType> getSubTypes() {
+		public SortedSet< TimelineEventType> getChildren() {
 			return ImmutableSortedSet.of(FILE_SYSTEM, WEB_ACTIVITY, MISC_TYPES, CUSTOM_TYPES);
 		}
 	};
 
 	TimelineEventType FILE_SYSTEM = new TimelineEventTypeImpl(1,
 			getBundle().getString("BaseTypes.fileSystem.name"),// NON-NLS
-			TypeLevel.BASE_TYPE, ROOT_EVENT_TYPE) {
+			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
 		@Override
-		public SortedSet< TimelineEventType> getSubTypes() {
+		public SortedSet< TimelineEventType> getChildren() {
 			return ImmutableSortedSet.of(FILE_MODIFIED, FILE_ACCESSED,
 					FILE_CREATED, FILE_CHANGED);
 		}
 	};
+
 	TimelineEventType WEB_ACTIVITY = new TimelineEventTypeImpl(2,
 			getBundle().getString("BaseTypes.webActivity.name"), // NON-NLS
-			TypeLevel.BASE_TYPE, ROOT_EVENT_TYPE) {
+			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
 		@Override
-		public SortedSet< TimelineEventType> getSubTypes() {
+		public SortedSet< TimelineEventType> getChildren() {
 			return ImmutableSortedSet.of(WEB_DOWNLOADS, WEB_COOKIE, WEB_BOOKMARK,
 					WEB_HISTORY, WEB_SEARCH, WEB_FORM_AUTOFILL, WEB_FORM_ADDRESSES);
 		}
 	};
+
 	TimelineEventType MISC_TYPES = new TimelineEventTypeImpl(3,
 			getBundle().getString("BaseTypes.miscTypes.name"), // NON-NLS
-			TypeLevel.BASE_TYPE, ROOT_EVENT_TYPE) {
+			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
 		@Override
-		public SortedSet<TimelineEventType> getSubTypes() {
+		public SortedSet<TimelineEventType> getChildren() {
 			return ImmutableSortedSet.of(CALL_LOG, DEVICES_ATTACHED, EMAIL,
 					EXIF, GPS_ROUTE, GPS_TRACKPOINT, INSTALLED_PROGRAM, MESSAGE,
 					RECENT_DOCUMENTS, REGISTRY, LOG_ENTRY);
@@ -171,16 +219,19 @@ public SortedSet<TimelineEventType> getSubTypes() {
 
 	TimelineEventType FILE_MODIFIED = new FilePathEventType(4,
 			getBundle().getString("FileSystemTypes.fileModified.name"), // NON-NLS
-			TypeLevel.SUB_TYPE, FILE_SYSTEM);
+			HierarchyLevel.EVENT, FILE_SYSTEM);
+	
 	TimelineEventType FILE_ACCESSED = new FilePathEventType(5,
 			getBundle().getString("FileSystemTypes.fileAccessed.name"), // NON-NLS
-			TypeLevel.SUB_TYPE, FILE_SYSTEM);
+			HierarchyLevel.EVENT, FILE_SYSTEM);
+	
 	TimelineEventType FILE_CREATED = new FilePathEventType(6,
 			getBundle().getString("FileSystemTypes.fileCreated.name"), // NON-NLS
-			TypeLevel.SUB_TYPE, FILE_SYSTEM);
+			HierarchyLevel.EVENT, FILE_SYSTEM);
+	
 	TimelineEventType FILE_CHANGED = new FilePathEventType(7,
 			getBundle().getString("FileSystemTypes.fileChanged.name"), // NON-NLS
-			TypeLevel.SUB_TYPE, FILE_SYSTEM);
+			HierarchyLevel.EVENT, FILE_SYSTEM);
 
 	TimelineEventType WEB_DOWNLOADS = new URLArtifactEventType(8,
 			getBundle().getString("WebTypes.webDownloads.name"), // NON-NLS
@@ -188,24 +239,28 @@ public SortedSet<TimelineEventType> getSubTypes() {
 			new BlackboardArtifact.Type(TSK_WEB_DOWNLOAD),
 			new Type(TSK_DATETIME_ACCESSED),
 			new Type(TSK_URL));
+	
 	TimelineEventType WEB_COOKIE = new URLArtifactEventType(9,
 			getBundle().getString("WebTypes.webCookies.name"),// NON-NLS
 			WEB_ACTIVITY,
 			new BlackboardArtifact.Type(TSK_WEB_COOKIE),
 			new Type(TSK_DATETIME),
 			new Type(TSK_URL));
+	
 	TimelineEventType WEB_BOOKMARK = new URLArtifactEventType(10,
 			getBundle().getString("WebTypes.webBookmarks.name"), // NON-NLS
 			WEB_ACTIVITY,
 			new BlackboardArtifact.Type(TSK_WEB_BOOKMARK),
 			new Type(TSK_DATETIME_CREATED),
 			new Type(TSK_URL));
+	
 	TimelineEventType WEB_HISTORY = new URLArtifactEventType(11,
 			getBundle().getString("WebTypes.webHistory.name"), // NON-NLS
 			WEB_ACTIVITY,
 			new BlackboardArtifact.Type(TSK_WEB_HISTORY),
 			new Type(TSK_DATETIME_ACCESSED),
 			new Type(TSK_URL));
+	
 	TimelineEventType WEB_SEARCH = new URLArtifactEventType(12,
 			getBundle().getString("WebTypes.webSearch.name"), // NON-NLS
 			WEB_ACTIVITY,
@@ -226,11 +281,11 @@ public SortedSet<TimelineEventType> getSubTypes() {
 				final BlackboardAttribute subject = getAttributeSafe(artf, new Type(TSK_SUBJECT));
 				BlackboardAttribute phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER));
 				// Make our best effort to find a valid phoneNumber for the description
-				if( phoneNumber == null) {
+				if (phoneNumber == null) {
 					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_TO));
 				}
-				
-				if( phoneNumber == null) {
+
+				if (phoneNumber == null) {
 					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_FROM));
 				}
 
@@ -281,13 +336,13 @@ public SortedSet<TimelineEventType> getSubTypes() {
 			new AttributeExtractor(new Type(TSK_NAME)),
 			artf -> {
 				BlackboardAttribute phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER));
-				if( phoneNumber == null) {
+				if (phoneNumber == null) {
 					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_TO));
 				}
-				if( phoneNumber == null) {
+				if (phoneNumber == null) {
 					phoneNumber = getAttributeSafe(artf, new Type(TSK_PHONE_NUMBER_FROM));
 				}
-				
+
 				return stringValueOf(phoneNumber);
 			},
 			new AttributeExtractor(new Type(TSK_DIRECTION)));
@@ -298,16 +353,22 @@ public SortedSet<TimelineEventType> getSubTypes() {
 			new BlackboardArtifact.Type(TSK_EMAIL_MSG),
 			new Type(TSK_DATETIME_SENT),
 			artf -> {
-				final BlackboardAttribute emailFrom = getAttributeSafe(artf, new Type(TSK_EMAIL_FROM));
-				final BlackboardAttribute emailTo = getAttributeSafe(artf, new Type(TSK_EMAIL_TO));
-				return stringValueOf(emailFrom) + " to " + stringValueOf(emailTo); // NON-NLS
+				String emailFrom = stringValueOf(getAttributeSafe(artf, new Type(TSK_EMAIL_FROM)));
+				if (emailFrom.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
+					emailFrom = emailFrom.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
+				}
+				String emailTo = stringValueOf(getAttributeSafe(artf, new Type(TSK_EMAIL_TO)));
+				if (emailTo.length() > TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX) {
+					emailTo = emailTo.substring(0, TimelineEventArtifactTypeImpl.EMAIL_TO_FROM_LENGTH_MAX);
+				}
+				return emailFrom + " to " + emailTo; // NON-NLS
 			},
 			new AttributeExtractor(new Type(TSK_SUBJECT)),
 			artf -> {
 				final BlackboardAttribute msgAttribute = getAttributeSafe(artf, new Type(TSK_EMAIL_CONTENT_PLAIN));
 				String msg = stringValueOf(msgAttribute);
-				if (msg.length() > EMAIL_FULL_DESCRIPTION_LENGTH_MAX) {
-					msg = msg.substring(0, EMAIL_FULL_DESCRIPTION_LENGTH_MAX);
+				if (msg.length() > TimelineEventArtifactTypeImpl.EMAIL_FULL_DESCRIPTION_LENGTH_MAX) {
+					msg = msg.substring(0, TimelineEventArtifactTypeImpl.EMAIL_FULL_DESCRIPTION_LENGTH_MAX);
 				}
 				return msg;
 			});
@@ -350,9 +411,9 @@ public SortedSet<TimelineEventType> getSubTypes() {
 	//custom event type base type
 	TimelineEventType CUSTOM_TYPES = new TimelineEventTypeImpl(22,
 			getBundle().getString("BaseTypes.customTypes.name"), // NON-NLS
-			TypeLevel.BASE_TYPE, ROOT_EVENT_TYPE) {
+			HierarchyLevel.CATEGORY, ROOT_EVENT_TYPE) {
 		@Override
-		public SortedSet< TimelineEventType> getSubTypes() {
+		public SortedSet< TimelineEventType> getChildren() {
 			return ImmutableSortedSet.of(OTHER, USER_CREATED);
 		}
 	};
@@ -387,7 +448,7 @@ public SortedSet< TimelineEventType> getSubTypes() {
 			new BlackboardArtifact.Type(TSK_TL_EVENT),
 			new BlackboardAttribute.Type(TSK_DATETIME),
 			new BlackboardAttribute.Type(TSK_DESCRIPTION));
-	
+
 	TimelineEventType WEB_FORM_AUTOFILL = new TimelineEventArtifactTypeImpl(27,
 			getBundle().getString("WebTypes.webFormAutoFill.name"),//NON-NLS
 			WEB_ACTIVITY,
@@ -399,7 +460,7 @@ public SortedSet< TimelineEventType> getSubTypes() {
 				final BlackboardAttribute count = getAttributeSafe(artf, new Type(TSK_COUNT));
 				return stringValueOf(name) + ":" + stringValueOf(value) + " count: " + stringValueOf(count); // NON-NLS
 			}, new EmptyExtractor(), new EmptyExtractor());
-	
+
 	TimelineEventType WEB_FORM_ADDRESSES = new URLArtifactEventType(28,
 			getBundle().getString("WebTypes.webFormAddress.name"),//NON-NLS
 			WEB_ACTIVITY,
@@ -407,20 +468,20 @@ public SortedSet< TimelineEventType> getSubTypes() {
 			new Type(TSK_DATETIME_ACCESSED),
 			new Type(TSK_EMAIL));
 
-	static SortedSet<? extends TimelineEventType> getBaseTypes() {
-		return ROOT_EVENT_TYPE.getSubTypes();
+	static SortedSet<? extends TimelineEventType> getCategoryTypes() {
+		return ROOT_EVENT_TYPE.getChildren();
 	}
 
 	static SortedSet<? extends TimelineEventType> getFileSystemTypes() {
-		return FILE_SYSTEM.getSubTypes();
+		return FILE_SYSTEM.getChildren();
 	}
 
 	static SortedSet<? extends TimelineEventType> getWebActivityTypes() {
-		return WEB_ACTIVITY.getSubTypes();
+		return WEB_ACTIVITY.getChildren();
 	}
 
 	static SortedSet<? extends TimelineEventType> getMiscTypes() {
-		return MISC_TYPES.getSubTypes();
+		return MISC_TYPES.getChildren();
 	}
 
 	static String stringValueOf(BlackboardAttribute attr) {
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypeImpl.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypeImpl.java
index 4f0b228117ebf3aacc0785785142a12edaaeb370..08a63323d60ee1c878d83b12b8aae1ce098f5b9a 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypeImpl.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypeImpl.java
@@ -24,44 +24,42 @@
 import org.apache.commons.lang3.ObjectUtils;
 
 /**
- * Implementation of TimelineEventType for the standard predefined event types AND has package
- scope parsing methods.
+ * Implementation of TimelineEventType for the standard predefined event types.
  */
 class TimelineEventTypeImpl implements TimelineEventType {
 
 	private final long typeID;
 	private final String displayName;
 	private final TimelineEventType superType;
-	private final TimelineEventType.TypeLevel eventTypeZoomLevel;
+	private final TimelineEventType.HierarchyLevel eventTypeZoomLevel;
 
 	/**
-	 * 
-	 * @param typeID  ID (from the Database)
+	 *
+	 * @param typeID             ID (from the Database)
 	 * @param displayName
 	 * @param eventTypeZoomLevel Where it is in the type hierarchy
-	 * @param superType 
+	 * @param superType
 	 */
-	TimelineEventTypeImpl(long typeID, String displayName, TimelineEventType.TypeLevel eventTypeZoomLevel, TimelineEventType superType) {
+	TimelineEventTypeImpl(long typeID, String displayName, TimelineEventType.HierarchyLevel eventTypeZoomLevel, TimelineEventType superType) {
 		this.superType = superType;
 		this.typeID = typeID;
 		this.displayName = displayName;
 		this.eventTypeZoomLevel = eventTypeZoomLevel;
 	}
 
-	
 	TimelineEventDescription parseDescription(String fullDescriptionRaw, String medDescriptionRaw, String shortDescriptionRaw) {
 		// The standard/default implementation:  Just bundle the three description levels into one object.
 		return new TimelineEventDescription(fullDescriptionRaw, medDescriptionRaw, shortDescriptionRaw);
 	}
 
 	@Override
-	public SortedSet<? extends TimelineEventType> getSubTypes() {
+	public SortedSet<? extends TimelineEventType> getChildren() {
 		return ImmutableSortedSet.of();
 	}
 
 	@Override
-	public Optional<? extends TimelineEventType> getSubType(String string) {
-		return getSubTypes().stream()
+	public Optional<? extends TimelineEventType> getChild(String string) {
+		return getChildren().stream()
 				.filter(type -> type.getDisplayName().equalsIgnoreCase(displayName))
 				.findFirst();
 	}
@@ -72,13 +70,13 @@ public String getDisplayName() {
 	}
 
 	@Override
-	public TimelineEventType getSuperType() {
+	public TimelineEventType getParent() {
 		return ObjectUtils.defaultIfNull(superType, ROOT_EVENT_TYPE);
 
 	}
 
 	@Override
-	public TimelineEventType.TypeLevel getTypeLevel() {
+	public TimelineEventType.HierarchyLevel getTypeHierarchyLevel() {
 		return eventTypeZoomLevel;
 	}
 
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypes.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypes.java
index bd74899822b22ee493a33db7e9cf618d4de999e9..e2544d22855881d9ddd4d94db542c6edfc448f67 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypes.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineEventTypes.java
@@ -91,7 +91,7 @@ TimelineEventDescription parseDescription(String fullDescriptionRaw, String medD
 
 	static class FilePathEventType extends TimelineEventTypeImpl {
 
-		FilePathEventType(long typeID, String displayName, TimelineEventType.TypeLevel eventTypeZoomLevel, TimelineEventType superType) {
+		FilePathEventType(long typeID, String displayName, TimelineEventType.HierarchyLevel eventTypeZoomLevel, TimelineEventType superType) {
 			super(typeID, displayName, eventTypeZoomLevel, superType);
 		}
 
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineFilter.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineFilter.java
index c4d78a0eca4e6b24c5f070052ccc6a7daaa299b9..cec25c9d4461d706a7dd061d09fda032c84bfc52 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineFilter.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineFilter.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2018 Basis Technology Corp.
+ * Copyright 2018-2019 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -23,7 +23,6 @@
 import com.google.common.net.MediaType;
 import static java.util.Arrays.asList;
 import java.util.Collection;
-import java.util.Comparator;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Objects;
@@ -31,7 +30,9 @@
 import java.util.stream.Collectors;
 import static java.util.stream.Collectors.joining;
 import java.util.stream.Stream;
+import javafx.beans.property.BooleanProperty;
 import javafx.beans.property.Property;
+import javafx.beans.property.SimpleBooleanProperty;
 import javafx.beans.property.SimpleStringProperty;
 import javafx.collections.FXCollections;
 import javafx.collections.ObservableList;
@@ -40,15 +41,16 @@
 import static org.sleuthkit.datamodel.SleuthkitCase.escapeSingleQuotes;
 
 /**
- * Interface for timeline event filters. Filters are given to the
- * TimelineManager who interpretes them appropriately for all db queries.
+ * An interface for timeline events filters used to selectively query the
+ * timeline tables in the case database for timeline events via the APIs of the
+ * timeline manager.
  */
 public abstract class TimelineFilter {
 
 	/**
-	 * get the display name of this filter
+	 * Gets the display name for this filter.
 	 *
-	 * @return a name for this filter to show in the UI
+	 * @return The display name.
 	 */
 	public abstract String getDisplayName();
 
@@ -63,6 +65,11 @@ public abstract class TimelineFilter {
 	 */
 	abstract String getSQLWhere(TimelineManager manager);
 
+	/**
+	 * Makes a copy of this filter.
+	 *
+	 * @return A copy of this filter.
+	 */
 	public abstract TimelineFilter copyOf();
 
 	@SuppressWarnings("unchecked")
@@ -72,21 +79,28 @@ static <S extends TimelineFilter, T extends CompoundFilter<S>> T copySubFilters(
 	}
 
 	/**
-	 * Intersection (And) filter
+	 * A timeline events filter that ANDs together a collection of timeline
+	 * event filters.
 	 *
-	 * @param <S> The type of sub Filters in this IntersectionFilter.
+	 * @param <SubFilterType> The type of the filters to be AND'ed together.
 	 */
-	public static class IntersectionFilter<S extends TimelineFilter> extends CompoundFilter<S> {
+	public static class IntersectionFilter<SubFilterType extends TimelineFilter> extends CompoundFilter<SubFilterType> {
 
+		/**
+		 * Constructs a timeline events filter that ANDs together a collection of
+		 * timeline events filters.
+		 *
+		 * @param subFilters The collection of filters to be AND'ed together.
+		 */
 		@VisibleForTesting
-		public IntersectionFilter(List<S> subFilters) {
+		public IntersectionFilter(List<SubFilterType> subFilters) {
 			super(subFilters);
 		}
 
 		@Override
-		public IntersectionFilter<S> copyOf() {
+		public IntersectionFilter<SubFilterType> copyOf() {
 			@SuppressWarnings("unchecked")
-			List<S> subfilters = Lists.transform(getSubFilters(), f -> (S) f.copyOf()); //make copies of all the subfilters.
+			List<SubFilterType> subfilters = Lists.transform(getSubFilters(), f -> (SubFilterType) f.copyOf()); //make copies of all the subfilters.
 			return new IntersectionFilter<>(subfilters);
 		}
 
@@ -108,68 +122,83 @@ String getSQLWhere(TimelineManager manager) {
 					.collect(Collectors.joining(" AND "));
 			return join.isEmpty() ? trueLiteral : "(" + join + ")";
 		}
+
 	}
 
 	/**
-	 * Event Type Filter. An instance of EventTypeFilter is usually a tree that
-	 * parallels the event type hierarchy with one filter/node for each event
-	 * type.
+	 * A timeline events filter used to query for a subset of the event types in
+	 * the event types hierarchy. The filter is built via a recursive descent
+	 * from any given type in the hierarchy, effectively creating a filter that
+	 * accepts the events in a branch of the event types hierarchy.
 	 */
 	public static final class EventTypeFilter extends UnionFilter<EventTypeFilter> {
 
-		/**
-		 * the event type this filter passes
-		 */
-		private final TimelineEventType eventType;
+		private final TimelineEventType rootEventType;
 
 		/**
-		 * private constructor that enables non recursive/tree construction of
-		 * the filter hierarchy for use in EventTypeFilter.copyOf().
+		 * Constructs a timeline events filter used to query for a subset of the
+		 * event types in the event types hierarchy. The filter is optionally
+		 * built via a recursive descent from any given type in the hierarchy,
+		 * effectively creating a filter that accepts the events in a branch of
+		 * the event types hierarchy. This constructor exists solely for the use
+		 * of this filter's implementation of the copyOf API.
 		 *
-		 * @param eventType the event type this filter passes
-		 * @param recursive true if subfilters should be added for each subtype.
-		 *                  False if no subfilters should be added.
+		 * @param rootEventType The "root" of the event hierarchy for the
+		 *                      purposes of this filter.
+		 * @param recursive     Whether or not to do a recursive descent of the
+		 *                      event types hierarchy from the root event type.
 		 */
-		private EventTypeFilter(TimelineEventType eventType, boolean recursive) {
+		private EventTypeFilter(TimelineEventType rootEventType, boolean recursive) {
 			super(FXCollections.observableArrayList());
-			this.eventType = eventType;
+			this.rootEventType = rootEventType;
 			if (recursive) {
 				// add subfilters for each subtype
-				for (TimelineEventType subType : eventType.getSubTypes()) {
+				for (TimelineEventType subType : rootEventType.getChildren()) {
 					addSubFilter(new EventTypeFilter(subType));
 				}
 			}
 		}
 
 		/**
-		 * public constructor. creates a subfilter for each subtype of the given
-		 * event type
+		 * Constructs a timeline events filter used to query for a subset of the
+		 * event types in the event types hierarchy. The subset of event types
+		 * that pass the filter is determined by a recursive descent from any
+		 * given type in the hierarchy, effectively creating a filter that
+		 * accepts the events in a branch of the event types hierarchy.
 		 *
-		 * @param eventType the event type this filter will pass
+		 * @param rootEventType The "root" of the event hierarchy for the
+		 *                      purposes of this filter.
 		 */
-		public EventTypeFilter(TimelineEventType eventType) {
-			this(eventType, true);
+		public EventTypeFilter(TimelineEventType rootEventType) {
+			this(rootEventType, true);
 		}
 
-		public TimelineEventType getEventType() {
-			return eventType;
+		/**
+		 * Gets the "root" of the branch of the event types hierarchy accepted
+		 * by this filter.
+		 *
+		 * @return The "root" event type.
+		 */
+		public TimelineEventType getRootEventType() {
+			return rootEventType;
 		}
 
 		@Override
 		public String getDisplayName() {
-			return (TimelineEventType.ROOT_EVENT_TYPE.equals(eventType)) ? BundleProvider.getBundle().getString("TypeFilter.displayName.text") : eventType.getDisplayName();
+			return (TimelineEventType.ROOT_EVENT_TYPE.equals(rootEventType)) ? BundleProvider.getBundle().getString("TypeFilter.displayName.text") : rootEventType.getDisplayName();
 		}
 
 		@Override
 		public EventTypeFilter copyOf() {
 			//make a nonrecursive copy of this filter, and then copy subfilters
-			return copySubFilters(this, new EventTypeFilter(eventType, false));
+			// RC (10/1/19): Why?
+			return copySubFilters(this, new EventTypeFilter(rootEventType, false));
 		}
 
 		@Override
 		public int hashCode() {
 			int hash = 7;
-			hash = 17 * hash + Objects.hashCode(this.eventType);
+			hash = 17 * hash + Objects.hashCode(this.rootEventType);
 			return hash;
 		}
 
@@ -185,7 +214,7 @@ public boolean equals(Object obj) {
 				return false;
 			}
 			final EventTypeFilter other = (EventTypeFilter) obj;
-			if (notEqual(this.eventType, other.eventType)) {
+			if (notEqual(this.rootEventType, other.getRootEventType())) {
 				return false;
 			}
 			return Objects.equals(this.getSubFilters(), other.getSubFilters());
@@ -198,7 +227,7 @@ String getSQLWhere(TimelineManager manager) {
 
 		private Stream<String> getSubTypeIDs() {
 			if (this.getSubFilters().isEmpty()) {
-				return Stream.of(String.valueOf(getEventType().getTypeID()));
+				return Stream.of(String.valueOf(getRootEventType().getTypeID()));
 			} else {
 				return this.getSubFilters().stream().flatMap(EventTypeFilter::getSubTypeIDs);
 			}
@@ -206,16 +235,61 @@ private Stream<String> getSubTypeIDs() {
 
 		@Override
 		public String toString() {
-			return "EventTypeFilter{" + "eventType=" + eventType + ", subfilters=" + getSubFilters() + '}';
+			return "EventTypeFilter{" + "rootEventType=" + rootEventType + ", subfilters=" + getSubFilters() + '}';
 		}
 
 	}
 
 	/**
-	 * Filter to show only events tag with the tagNames of the selected
-	 * subfilters.
+	 * A timeline events filter used to query for events where the direct source
+	 * (file or artifact) of the events has either been tagged or not tagged.
 	 */
-	public static final class TagsFilter extends UnionFilter<TagNameFilter> {
+	public static final class TagsFilter extends TimelineFilter {
+
+		private final BooleanProperty eventSourcesAreTagged = new SimpleBooleanProperty();
+
+		/**
+		 * Constructs a timeline events filter used to query for events where
+		 * the direct source (file or artifact) of the events has not been
+		 * tagged.
+		 */
+		public TagsFilter() {
+		}
+
+		/**
+		 * Constructs a timeline events filter used to query for events where
+		 * the direct source (file or artifact) of the events has either been
+		 * tagged or not tagged.
+		 *
+		 * @param eventSourceIsTagged Whether the direct sources of the events
+		 *                            need to be tagged or not tagged to be
+		 *                            accepted by this filter.
+		 */
+		public TagsFilter(boolean eventSourceIsTagged) {
+			this.eventSourcesAreTagged.set(eventSourceIsTagged);
+		}
+
+		/**
+		 * Sets whether the direct sources of the events have to be tagged or
+		 * not tagged to be accepted by this filter.
+		 *
+		 * @param eventSourceIsTagged Whether the direct sources of the events
+		 *                            have to be tagged or not tagged to be
+		 *                            accepted by this filter.
+		 */
+		public synchronized void setEventSourcesAreTagged(boolean eventSourceIsTagged) {
+			this.eventSourcesAreTagged.set(eventSourceIsTagged);
+		}
+
+		/**
+		 * Indicates whether the direct sources of the events have to be tagged
+		 * or not tagged.
+		 *
+		 * @return True or false.
+		 */
+		public synchronized boolean getEventSourceAreTagged() {
+			return eventSourcesAreTagged.get();
+		}
 
 		@Override
 		public String getDisplayName() {
@@ -224,19 +298,44 @@ public String getDisplayName() {
 
 		@Override
 		public TagsFilter copyOf() {
-			return copySubFilters(this, new TagsFilter());
+			return new TagsFilter(eventSourcesAreTagged.get());
 		}
 
-		public void removeFilterForTag(TagName tagName) {
-			getSubFilters().removeIf(subfilter -> subfilter.getTagName().equals(tagName));
-			getSubFilters().sort(Comparator.comparing(TagNameFilter::getDisplayName));
+		@Override
+		public boolean equals(Object obj) {
+			if (obj == null || !(obj instanceof TagsFilter)) {
+				return false;
+			}
+
+			return ((TagsFilter) obj).getEventSourceAreTagged() == eventSourcesAreTagged.get();
 		}
+
+		@Override
+		public int hashCode() {
+			int hash = 7;
+			hash = 67 * hash + Objects.hashCode(this.eventSourcesAreTagged);
+			return hash;
+		}
+
+		@Override
+		String getSQLWhere(TimelineManager manager) {
+			String whereStr;
+			if (eventSourcesAreTagged.get()) {
+				whereStr = "tagged = 1";
+			} else {
+				whereStr = "tagged = 0";
+			}
+
+			return whereStr;
+		}
+
 	}
 
 	/**
-	 * Union(or) filter
+	 * A timeline events filter that ORs together a collection of timeline
+	 * events filters.
 	 *
-	 * @param <SubFilterType> The type of the subfilters.
+	 * @param <SubFilterType> The type of the filters to be OR'ed together.
 	 */
 	public static abstract class UnionFilter<SubFilterType extends TimelineFilter> extends TimelineFilter.CompoundFilter<SubFilterType> {
 
@@ -264,23 +363,43 @@ String getSQLWhere(TimelineManager manager) {
 	}
 
 	/**
-	 * Filter for text matching
+	 * A timeline events filter used to query for events that have a particular
+	 * substring in their short, medium, or full descriptions.
 	 */
 	public static final class TextFilter extends TimelineFilter {
 
-		private final SimpleStringProperty textProperty = new SimpleStringProperty();
+		private final SimpleStringProperty descriptionSubstring = new SimpleStringProperty();
 
+		/**
+		 * Constructs a timeline events filter used to query for events that
+		 * have the empty string as a substring in their short, medium, or full
+		 * descriptions.
+		 */
 		public TextFilter() {
 			this("");
 		}
 
-		public TextFilter(String text) {
+		/**
+		 * Constructs a timeline events filter used to query for events that
+		 * have a given substring in their short, medium, or full descriptions.
+		 *
+		 * @param descriptionSubstring The substring that must be present in one
+		 *                             or more of the descriptions of each event
+		 *                             that passes the filter.
+		 */
+		public TextFilter(String descriptionSubstring) {
 			super();
-			this.textProperty.set(text.trim());
+			this.descriptionSubstring.set(descriptionSubstring.trim());
 		}
 
-		public synchronized void setText(String text) {
-			this.textProperty.set(text.trim());
+		/**
+		 * Sets the substring that must be present in one or more of the
+		 * descriptions of each event that passes the filter.
+		 *
+		 * @param descriptionSubstring The substring.
+		 */
+		public synchronized void setDescriptionSubstring(String descriptionSubstring) {
+			this.descriptionSubstring.set(descriptionSubstring.trim());
 		}
 
 		@Override
@@ -288,17 +407,29 @@ public String getDisplayName() {
 			return BundleProvider.getBundle().getString("TextFilter.displayName.text");
 		}
 
-		public synchronized String getText() {
-			return textProperty.getValue();
+		/**
+		 * Gets the substring that must be present in one or more of the
+		 * descriptions of each event that passes the filter.
+		 *
+		 * @return The required substring.
+		 */
+		public synchronized String getSubstring() {
+			return descriptionSubstring.getValue();
 		}
 
-		public Property<String> textProperty() {
-			return textProperty;
+		/**
+		 * Gets the substring that must be present in one or more of the
+		 * descriptions of each event that passes the filter.
+		 *
+		 * @return The required substring as a Property.
+		 */
+		public Property<String> substringProperty() {
+			return descriptionSubstring;
 		}
 
 		@Override
 		public synchronized TextFilter copyOf() {
-			return new TextFilter(getText());
+			return new TextFilter(getSubstring());
 		}
 
 		@Override
@@ -310,22 +441,22 @@ public boolean equals(Object obj) {
 				return false;
 			}
 			final TextFilter other = (TextFilter) obj;
-			return Objects.equals(getText(), other.getText());
+			return Objects.equals(getSubstring(), other.getSubstring());
 		}
 
 		@Override
 		public int hashCode() {
 			int hash = 5;
-			hash = 29 * hash + Objects.hashCode(this.textProperty.get());
+			hash = 29 * hash + Objects.hashCode(this.descriptionSubstring.get());
 			return hash;
 		}
 
 		@Override
 		String getSQLWhere(TimelineManager manager) {
-			if (StringUtils.isNotBlank(this.getText())) {
-				return "((med_description like '%" + escapeSingleQuotes(this.getText()) + "%')" //NON-NLS
-						+ " or (full_description like '%" + escapeSingleQuotes(this.getText()) + "%')" //NON-NLS
-						+ " or (short_description like '%" + escapeSingleQuotes(this.getText()) + "%'))"; //NON-NLS
+			if (StringUtils.isNotBlank(this.getSubstring())) {
+				return "((med_description like '%" + escapeSingleQuotes(this.getSubstring()) + "%')" //NON-NLS
+						+ " or (full_description like '%" + escapeSingleQuotes(this.getSubstring()) + "%')" //NON-NLS
+						+ " or (short_description like '%" + escapeSingleQuotes(this.getSubstring()) + "%'))"; //NON-NLS
 			} else {
 				return manager.getSQLWhere(null);
 			}
@@ -333,109 +464,174 @@ String getSQLWhere(TimelineManager manager) {
 
 		@Override
 		public String toString() {
-			return "TextFilter{" + "textProperty=" + textProperty.getValue() + '}';
+			return "TextFilter{" + "textProperty=" + descriptionSubstring.getValue() + '}';
 		}
 
 	}
 
 	/**
-	 * An implementation of IntersectionFilter designed to be used as the root
-	 * of a filter tree. provides named access to specific subfilters.
+	 * A timeline events filter that ANDs together instances of a variety of
+	 * event filter types to create what is in effect a "tree" of filters.
 	 */
 	public static final class RootFilter extends IntersectionFilter<TimelineFilter> {
 
-		private final HideKnownFilter knownFilter;
+		private final HideKnownFilter knownFilesFilter;
 		private final TagsFilter tagsFilter;
-		private final HashHitsFilter hashFilter;
-		private final TextFilter textFilter;
-		private final EventTypeFilter typeFilter;
+		private final HashHitsFilter hashSetHitsFilter;
+		private final TextFilter descriptionSubstringFilter;
+		private final EventTypeFilter eventTypesFilter;
 		private final DataSourcesFilter dataSourcesFilter;
 		private final FileTypesFilter fileTypesFilter;
-		private final Set<TimelineFilter> namedSubFilters = new HashSet<>();
+		private final Set<TimelineFilter> additionalFilters = new HashSet<>();
 
+		/**
+		 * Get the data sources filter of this filter.
+		 *
+		 * @return The filter.
+		 */
 		public DataSourcesFilter getDataSourcesFilter() {
 			return dataSourcesFilter;
 		}
 
+		/**
+		 * Gets the tagged events sources filter of this filter.
+		 *
+		 * @return The filter.
+		 */
 		public TagsFilter getTagsFilter() {
 			return tagsFilter;
 		}
 
+		/**
+		 * Gets the source file hash set hits filter of this filter.
+		 *
+		 * @return The filter.
+		 */
 		public HashHitsFilter getHashHitsFilter() {
-			return hashFilter;
+			return hashSetHitsFilter;
 		}
 
+		/**
+		 * Gets the event types filter of this filter.
+		 *
+		 * @return The filter.
+		 */
 		public EventTypeFilter getEventTypeFilter() {
-			return typeFilter;
+			return eventTypesFilter;
 		}
 
+		/**
+		 * Gets the exclude known source files filter of this filter.
+		 *
+		 * @return The filter.
+		 */
 		public HideKnownFilter getKnownFilter() {
-			return knownFilter;
+			return knownFilesFilter;
 		}
 
+		/**
+		 * Gets the description substring filter of this filter.
+		 *
+		 * @return The filter.
+		 */
 		public TextFilter getTextFilter() {
-			return textFilter;
+			return descriptionSubstringFilter;
 		}
 
+		/**
+		 * Gets the source file types filter of this filter.
+		 *
+		 * @return The filter.
+		 */
 		public FileTypesFilter getFileTypesFilter() {
 			return fileTypesFilter;
 		}
 
-		public RootFilter(HideKnownFilter knownFilter, TagsFilter tagsFilter, HashHitsFilter hashFilter,
-				TextFilter textFilter, EventTypeFilter typeFilter, DataSourcesFilter dataSourcesFilter,
-				FileTypesFilter fileTypesFilter, Collection<TimelineFilter> annonymousSubFilters) {
-			super(FXCollections.observableArrayList(textFilter, knownFilter, tagsFilter, dataSourcesFilter, hashFilter, fileTypesFilter, typeFilter));
-
+		/**
+		 * Constructs a timeline events filter that ANDs together instances of a
+		 * variety of event filter types to create what is in effect a "tree" of
+		 * filters.
+		 *
+		 * @param knownFilesFilter           A filter that excludes events with
+		 *                                   known file event sources.
+		 * @param tagsFilter                 A filter that excludes or includes
+		 *                                   events with tagged event sources.
+		 * @param hashSetHitsFilter          A filter that excludes or includes
+		 *                                   events with event sources that have
+		 *                                   hash set hits.
+		 * @param descriptionSubstringFilter A filter that requires a substring
+		 *                                   to be present in the event
+		 *                                   description.
+		 * @param eventTypesFilter           A filter that accepts events of
+		 *                                   specified events types.
+		 * @param dataSourcesFilter          A filter that accepts events
+		 *                                   associated with a specified subset
+		 *                                   of data sources.
+		 * @param fileTypesFilter            A filter that includes or excludes
+		 *                                   events with source files of
+		 *                                   particular media types.
+		 * @param additionalFilters          Additional filters.
+		 */
+		public RootFilter(
+				HideKnownFilter knownFilesFilter,
+				TagsFilter tagsFilter,
+				HashHitsFilter hashSetHitsFilter,
+				TextFilter descriptionSubstringFilter,
+				EventTypeFilter eventTypesFilter,
+				DataSourcesFilter dataSourcesFilter,
+				FileTypesFilter fileTypesFilter,
+				Collection<TimelineFilter> additionalFilters) {
+
+			super(FXCollections.observableArrayList(descriptionSubstringFilter, knownFilesFilter, tagsFilter, dataSourcesFilter, hashSetHitsFilter, fileTypesFilter, eventTypesFilter));
 			getSubFilters().removeIf(Objects::isNull);
-			this.knownFilter = knownFilter;
+			this.knownFilesFilter = knownFilesFilter;
 			this.tagsFilter = tagsFilter;
-			this.hashFilter = hashFilter;
-			this.textFilter = textFilter;
-			this.typeFilter = typeFilter;
+			this.hashSetHitsFilter = hashSetHitsFilter;
+			this.descriptionSubstringFilter = descriptionSubstringFilter;
+			this.eventTypesFilter = eventTypesFilter;
 			this.dataSourcesFilter = dataSourcesFilter;
 			this.fileTypesFilter = fileTypesFilter;
-
-			namedSubFilters.addAll(asList(textFilter, knownFilter, tagsFilter, dataSourcesFilter, hashFilter, fileTypesFilter, typeFilter));
-			namedSubFilters.removeIf(Objects::isNull);
-			annonymousSubFilters.stream().
+			this.additionalFilters.addAll(asList(descriptionSubstringFilter, knownFilesFilter, tagsFilter, dataSourcesFilter, hashSetHitsFilter, fileTypesFilter, eventTypesFilter));
+			this.additionalFilters.removeIf(Objects::isNull);
+			additionalFilters.stream().
 					filter(Objects::nonNull).
-					filter(this::isNotNamedSubFilter).
+					filter(this::hasAdditionalFilter).
 					map(TimelineFilter::copyOf).
 					forEach(anonymousFilter -> getSubFilters().add(anonymousFilter));
 		}
 
 		@Override
 		public RootFilter copyOf() {
-			Set<TimelineFilter> annonymousSubFilters = getSubFilters().stream()
-					.filter(this::isNotNamedSubFilter)
+			Set<TimelineFilter> subFilters = getSubFilters().stream()
+					.filter(this::hasAdditionalFilter)
 					.map(TimelineFilter::copyOf)
 					.collect(Collectors.toSet());
-			return new RootFilter(knownFilter.copyOf(), tagsFilter.copyOf(),
-					hashFilter.copyOf(), textFilter.copyOf(), typeFilter.copyOf(),
-					dataSourcesFilter.copyOf(), fileTypesFilter.copyOf(), annonymousSubFilters);
+			return new RootFilter(knownFilesFilter.copyOf(), tagsFilter.copyOf(),
+					hashSetHitsFilter.copyOf(), descriptionSubstringFilter.copyOf(), eventTypesFilter.copyOf(),
+					dataSourcesFilter.copyOf(), fileTypesFilter.copyOf(), subFilters);
 
 		}
 
-		private boolean isNotNamedSubFilter(TimelineFilter subFilter) {
-			return !(namedSubFilters.contains(subFilter));
+		private boolean hasAdditionalFilter(TimelineFilter subFilter) {
+			return !(additionalFilters.contains(subFilter));
 		}
 
 		@Override
 		public String toString() {
-			return "RootFilter{" + "knownFilter=" + knownFilter + ", tagsFilter=" + tagsFilter + ", hashFilter=" + hashFilter + ", textFilter=" + textFilter + ", typeFilter=" + typeFilter + ", dataSourcesFilter=" + dataSourcesFilter + ", fileTypesFilter=" + fileTypesFilter + ", namedSubFilters=" + namedSubFilters + '}';
+			return "RootFilter{" + "knownFilter=" + knownFilesFilter + ", tagsFilter=" + tagsFilter + ", hashFilter=" + hashSetHitsFilter + ", textFilter=" + descriptionSubstringFilter + ", typeFilter=" + eventTypesFilter + ", dataSourcesFilter=" + dataSourcesFilter + ", fileTypesFilter=" + fileTypesFilter + ", namedSubFilters=" + additionalFilters + '}';
 		}
 
 		@Override
 		public int hashCode() {
 			int hash = 7;
-			hash = 17 * hash + Objects.hashCode(this.knownFilter);
+			hash = 17 * hash + Objects.hashCode(this.knownFilesFilter);
 			hash = 17 * hash + Objects.hashCode(this.tagsFilter);
-			hash = 17 * hash + Objects.hashCode(this.hashFilter);
-			hash = 17 * hash + Objects.hashCode(this.textFilter);
-			hash = 17 * hash + Objects.hashCode(this.typeFilter);
+			hash = 17 * hash + Objects.hashCode(this.hashSetHitsFilter);
+			hash = 17 * hash + Objects.hashCode(this.descriptionSubstringFilter);
+			hash = 17 * hash + Objects.hashCode(this.eventTypesFilter);
 			hash = 17 * hash + Objects.hashCode(this.dataSourcesFilter);
 			hash = 17 * hash + Objects.hashCode(this.fileTypesFilter);
-			hash = 17 * hash + Objects.hashCode(this.namedSubFilters);
+			hash = 17 * hash + Objects.hashCode(this.additionalFilters);
 			return hash;
 		}
 
@@ -451,35 +647,36 @@ public boolean equals(Object obj) {
 				return false;
 			}
 			final RootFilter other = (RootFilter) obj;
-			if (notEqual(this.knownFilter, other.knownFilter)) {
+			if (notEqual(this.knownFilesFilter, other.getKnownFilter())) {
 				return false;
 			}
-			if (notEqual(this.tagsFilter, other.tagsFilter)) {
+			if (notEqual(this.tagsFilter, other.getTagsFilter())) {
 				return false;
 			}
-			if (notEqual(this.hashFilter, other.hashFilter)) {
+			if (notEqual(this.hashSetHitsFilter, other.getHashHitsFilter())) {
 				return false;
 			}
-			if (notEqual(this.textFilter, other.textFilter)) {
+			if (notEqual(this.descriptionSubstringFilter, other.getTextFilter())) {
 				return false;
 			}
-			if (notEqual(this.typeFilter, other.typeFilter)) {
+			if (notEqual(this.eventTypesFilter, other.getEventTypeFilter())) {
 				return false;
 			}
-			if (notEqual(this.dataSourcesFilter, other.dataSourcesFilter)) {
+			if (notEqual(this.dataSourcesFilter, other.getDataSourcesFilter())) {
 				return false;
 			}
 
-			if (notEqual(this.fileTypesFilter, other.fileTypesFilter)) {
+			if (notEqual(this.fileTypesFilter, other.getFileTypesFilter())) {
 				return false;
 			}
-			return Objects.equals(this.namedSubFilters, other.namedSubFilters);
+			return Objects.equals(this.additionalFilters, other.getSubFilters());
 		}
 
 	}
 
 	/**
-	 * Filter to hide known files
+	 * A timeline events filter used to filter out events that have a direct or
+	 * indirect event source that is a known file.
 	 */
 	public static final class HideKnownFilter extends TimelineFilter {
 
@@ -488,10 +685,6 @@ public String getDisplayName() {
 			return BundleProvider.getBundle().getString("hideKnownFilter.displayName.text");
 		}
 
-		public HideKnownFilter() {
-			super();
-		}
-
 		@Override
 		public HideKnownFilter copyOf() {
 			return new HideKnownFilter();
@@ -519,11 +712,13 @@ String getSQLWhere(TimelineManager manager) {
 		public String toString() {
 			return "HideKnownFilter{" + '}';
 		}
+
 	}
 
 	/**
-	 * A Filter with a collection of sub-filters. Concrete implementations can
-	 * decide how to combine the sub-filters.
+	 * A timeline events filter composed of a collection of event filters.
+	 * Concrete implementations can decide how to combine the filters in the
+	 * collection.
 	 *
 	 * @param <SubFilterType> The type of the subfilters.
 	 */
@@ -535,23 +730,31 @@ protected void addSubFilter(SubFilterType subfilter) {
 			}
 		}
 
-		/**
-		 * The list of sub-filters that make up this filter
-		 */
 		private final ObservableList<SubFilterType> subFilters = FXCollections.observableArrayList();
 
+		/**
+		 * Gets the collection of filters that make up this filter.
+		 *
+		 * @return The filters.
+		 */
 		public final ObservableList<SubFilterType> getSubFilters() {
 			return subFilters;
 		}
 
+		/**
+		 * Indicates whether or not this filter has subfilters.
+		 *
+		 * @return True or false.
+		 */
 		public boolean hasSubFilters() {
 			return getSubFilters().isEmpty() == false;
 		}
 
 		/**
-		 * construct a compound filter from a list of other filters to combine.
+		 * Constructs a timeline events filter composed of a collection of event
+		 * filters.
 		 *
-		 * @param subFilters
+		 * @param subFilters The collection of filters.
 		 */
 		protected CompoundFilter(List<SubFilterType> subFilters) {
 			super();
@@ -591,73 +794,39 @@ public String toString() {
 	}
 
 	/**
-	 * Filter for an individual hash set
-	 */
-	static public final class HashSetFilter extends TimelineFilter {
-
-		private final String hashSetName;
-
-		public String getHashSetName() {
-			return hashSetName;
-		}
-
-		public HashSetFilter(String hashSetName) {
-			super();
-			this.hashSetName = hashSetName;
-		}
-
-		@Override
-		public synchronized HashSetFilter copyOf() {
-			return new HashSetFilter(getHashSetName());
-		}
-
-		@Override
-		public String getDisplayName() {
-			return hashSetName;
-		}
-
-		@Override
-		public int hashCode() {
-			int hash = 7;
-			hash = 79 * hash + Objects.hashCode(this.hashSetName);
-			return hash;
-		}
-
-		@Override
-		public boolean equals(Object obj) {
-			if (obj == null) {
-				return false;
-			}
-			if (getClass() != obj.getClass()) {
-				return false;
-			}
-			final HashSetFilter other = (HashSetFilter) obj;
-			return Objects.equals(this.hashSetName, other.hashSetName);
-		}
-
-		@Override
-		String getSQLWhere(TimelineManager manager) {
-			return "(hash_set_name = '" + escapeSingleQuotes(getHashSetName()) + "' )"; //NON-NLS
-		}
-
-	}
-
-	/**
-	 * Filter for an individual datasource
+	 * A timeline events filter used to query for events associated with a given
+	 * data source.
 	 */
 	public static final class DataSourceFilter extends TimelineFilter {
 
 		private final String dataSourceName;
 		private final long dataSourceID;
 
+		/**
+		 * Gets the object ID of the specified data source.
+		 *
+		 * @return The data source object ID.
+		 */
 		public long getDataSourceID() {
 			return dataSourceID;
 		}
 
+		/**
+		 * Gets the display name of the specified data source.
+		 *
+		 * @return The data source display name.
+		 */
 		public String getDataSourceName() {
 			return dataSourceName;
 		}
 
+		/**
+		 * Constructs a timeline events filter used to query for events
+		 * associated with a given data source.
+		 *
+		 * @param dataSourceName The data source display name.
+		 * @param dataSourceID   The data source object ID.
+		 */
 		public DataSourceFilter(String dataSourceName, long dataSourceID) {
 			super();
 			this.dataSourceName = dataSourceName;
@@ -708,89 +877,101 @@ String getSQLWhere(TimelineManager manager) {
 	}
 
 	/**
-	 * Filter for an individual TagName
+	 * A timeline events filter used to query for events where the files that
+	 * are the direct or indirect sources of the events either have or do not
+	 * have hash set hits.
+	 *
 	 */
-	static public final class TagNameFilter extends TimelineFilter {
+	public static final class HashHitsFilter extends TimelineFilter {
 
-		private final TagName tagName;
+		private final BooleanProperty eventSourcesHaveHashSetHits = new SimpleBooleanProperty();
 
-		public TagNameFilter(TagName tagName) {
-			super();
-			this.tagName = tagName;
+		/**
+		 * Constructs a timeline events filter used to query for events where
+		 * the files that are the direct or indirect sources of the events
+		 * either do not have hash set hits.
+		 */
+		public HashHitsFilter() {
 		}
 
-		public TagName getTagName() {
-			return tagName;
+		/**
+		 * Constructs a timeline events filter used to query for events where
+		 * the files that are the direct or indirect sources of the events
+		 * either have or do not have hash set hits.
+		 *
+		 * @param hasHashHit Whether or not the files associated with the events
+		 *                   have or do not have hash set hits.
+		 */
+		public HashHitsFilter(boolean hasHashHit) {
+			eventSourcesHaveHashSetHits.set(hasHashHit);
 		}
 
-		@Override
-		public synchronized TagNameFilter copyOf() {
-			return new TagNameFilter(getTagName());
+		/**
+		 * Sets whether or not the files associated with the events have or do
+		 * not have hash set hits
+		 *
+		 * @param hasHashHit True or false.
+		 */
+		public synchronized void setEventSourcesHaveHashSetHits(boolean hasHashHit) {
+			eventSourcesHaveHashSetHits.set(hasHashHit);
+		}
+
+		/**
+		 * Indicates whether or not the files associated with the events have or
+		 * do not have hash set hits
+		 *
+		 * @return True or false.
+		 */
+		public synchronized boolean getEventSourcesHaveHashSetHits() {
+			return eventSourcesHaveHashSetHits.get();
 		}
 
 		@Override
 		public String getDisplayName() {
-			return tagName.getDisplayName();
+			return BundleProvider.getBundle().getString("hashHitsFilter.displayName.text");
 		}
 
 		@Override
-		public int hashCode() {
-			int hash = 3;
-			hash = 73 * hash + Objects.hashCode(this.tagName);
-			return hash;
+		public HashHitsFilter copyOf() {
+			return new HashHitsFilter(eventSourcesHaveHashSetHits.get());
 		}
 
 		@Override
 		public boolean equals(Object obj) {
-			if (this == obj) {
-				return true;
-			}
-			if (obj == null) {
+			if (obj == null || !(obj instanceof HashHitsFilter)) {
 				return false;
 			}
-			if (getClass() != obj.getClass()) {
-				return false;
-			}
-			final TagNameFilter other = (TagNameFilter) obj;
-			return Objects.equals(this.tagName, other.tagName);
-		}
 
-		@Override
-		String getSQLWhere(TimelineManager manager) {
-			return " (tsk_events.tag_name_id = " + getTagName().getId() + " ) "; //NON-NLS
+			return ((HashHitsFilter) obj).getEventSourcesHaveHashSetHits() == eventSourcesHaveHashSetHits.get();
 		}
 
 		@Override
-		public String toString() {
-			return "TagNameFilter{" + "tagName=" + tagName + '}';
+		public int hashCode() {
+			int hash = 7;
+			hash = 67 * hash + Objects.hashCode(this.eventSourcesHaveHashSetHits);
+			return hash;
 		}
 
-	}
-
-	/**
-	 *
-	 */
-	static public final class HashHitsFilter extends UnionFilter<HashSetFilter> {
-
 		@Override
-		public String getDisplayName() {
-			return BundleProvider.getBundle().getString("hashHitsFilter.displayName.text");
-		}
+		String getSQLWhere(TimelineManager manager) {
+			String whereStr = "";
+			if (eventSourcesHaveHashSetHits.get()) {
+				whereStr = "hash_hit = 1";
+			} else {
+				whereStr = "hash_hit = 0";
+			}
 
-		@Override
-		public HashHitsFilter copyOf() {
-			return copySubFilters(this, new HashHitsFilter());
+			return whereStr;
 		}
+
 	}
 
 	/**
-	 * union of DataSourceFilters
+	 * A timeline events filter used to query for events associated with a given
+	 * subset of data sources. The filter is a union of one or more single data
+	 * source filters.
 	 */
-	static public final class DataSourcesFilter extends UnionFilter< DataSourceFilter> {
-
-		public DataSourcesFilter() {
-			super();
-		}
+	static public final class DataSourcesFilter extends UnionFilter<DataSourceFilter> {
 
 		@Override
 		public DataSourcesFilter copyOf() {
@@ -801,10 +982,13 @@ public DataSourcesFilter copyOf() {
 		public String getDisplayName() {
 			return BundleProvider.getBundle().getString("DataSourcesFilter.displayName.text");
 		}
+
 	}
 
 	/**
-	 * union of FileTypeFilters
+	 * A timeline events filter used to query for events with direct or indirect
+	 * event sources that are files with a given set of media types. The filter
+	 * is a union of one or more file source filters.
 	 */
 	static public final class FileTypesFilter extends UnionFilter<FileTypeFilter> {
 
@@ -820,41 +1004,52 @@ public String getDisplayName() {
 		}
 
 	}
-	
+
 	/**
-     * Gets all files that are NOT the specified types
-     */
-    static public class InverseFileTypeFilter extends FileTypeFilter {
+	 * A timeline events filter used to query for events with direct or indirect
+	 * event sources that are files that do not have a given set of media types.
+	 */
+	static public class InverseFileTypeFilter extends FileTypeFilter {
 
-        public InverseFileTypeFilter(String displayName, Collection<String> mediaTypes) {
-            super(displayName, mediaTypes);
-        }
+		public InverseFileTypeFilter(String displayName, Collection<String> mediaTypes) {
+			super(displayName, mediaTypes);
+		}
 
-        @Override
-        public InverseFileTypeFilter copyOf() {
-            return new InverseFileTypeFilter(getDisplayName(), super.mediaTypes);
-        }
+		@Override
+		public InverseFileTypeFilter copyOf() {
+			return new InverseFileTypeFilter(getDisplayName(), super.mediaTypes);
+		}
 
-        @Override
-        String getSQLWhere(TimelineManager manager) {
-            return " NOT " + super.getSQLWhere(manager);
-        }
-    }
+		@Override
+		String getSQLWhere(TimelineManager manager) {
+			return " NOT " + super.getSQLWhere(manager);
+		}
+	}
 
 	/**
-	 * Filter for events derived from files with the given media/mime-types.
+	 * A timeline events filter used to query for events with direct or indirect
+	 * event sources that are files with a given set of media types.
 	 */
 	public static class FileTypeFilter extends TimelineFilter {
 
 		private final String displayName;
 		private final String sqlWhere;
-		Collection <String> mediaTypes = new HashSet<>();
+		Collection<String> mediaTypes = new HashSet<>();
 
 		private FileTypeFilter(String displayName, String sql) {
 			this.displayName = displayName;
 			this.sqlWhere = sql;
 		}
 
+		/**
+		 * Constructs a timeline events filter used to query for events with
+		 * direct or indirect event sources that are files with a given set of
+		 * media types.
+		 *
+		 * @param displayName The display name for the filter.
+		 * @param mediaTypes  The event source file media types that pass the
+		 *                    filter.
+		 */
 		public FileTypeFilter(String displayName, Collection<String> mediaTypes) {
 			this(displayName,
 					mediaTypes.stream()
@@ -917,4 +1112,5 @@ public String toString() {
 		}
 
 	}
+
 }
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineLevelOfDetail.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineLevelOfDetail.java
new file mode 100755
index 0000000000000000000000000000000000000000..a0f7b8dc329e1709c2213fa54fa3e4f7e83e6b42
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineLevelOfDetail.java
@@ -0,0 +1,80 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel;
+
+import java.util.ResourceBundle;
+
+/**
+ * An enumeration of the levels of detail of various aspects of timeline data.
+ */
+public enum TimelineLevelOfDetail {
+
+	LOW(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("TimelineLevelOfDetail.low")),
+	MEDIUM(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("TimelineLevelOfDetail.medium")),
+	HIGH(ResourceBundle.getBundle("org.sleuthkit.datamodel.Bundle").getString("TimelineLevelOfDetail.high"));
+
+	private final String displayName;
+
+	/**
+	 * Gets the display name of this level of detail.
+	 *
+	 * @return The display name.
+	 */
+	public String getDisplayName() {
+		return displayName;
+	}
+
+	/**
+	 * Constructs an element of the enumeration of the levels of detail of
+	 * various aspects of timeline data such as event descriptions and the
+	 * timeline event types hierarchy.
+	 *
+	 * @param displayName The display name of the level of detail.
+	 */
+	private TimelineLevelOfDetail(String displayName) {
+		this.displayName = displayName;
+	}
+
+	/**
+	 * Gets the next higher level of detail relative to this level of detail.
+	 *
+	 * @return The next higher level of detail, may be null.
+	 */
+	public TimelineLevelOfDetail moreDetailed() {
+		try {
+			return values()[ordinal() + 1];
+		} catch (ArrayIndexOutOfBoundsException e) {
+			return null;
+		}
+	}
+
+	/**
+	 * Gets the next lower level of detail relative to this level of detail.
+	 *
+	 * @return The next lower level of detail, may be null.
+	 */
+	public TimelineLevelOfDetail lessDetailed() {
+		try {
+			return values()[ordinal() - 1];
+		} catch (ArrayIndexOutOfBoundsException e) {
+			return null;
+		}
+	}
+
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java b/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java
index 1297a907e4cf443a06b4a39d6ad6bdf58f43b382..415b3446abcdf5f394e6dfdd03e8709ed3f5f375 100644
--- a/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java
+++ b/bindings/java/src/org/sleuthkit/datamodel/TimelineManager.java
@@ -1,7 +1,7 @@
 /*
  * Sleuth Kit Data Model
  *
- * Copyright 2013-2019 Basis Technology Corp.
+ * Copyright 2018-2019 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,6 +18,7 @@
  */
 package org.sleuthkit.datamodel;
 
+import com.google.common.annotations.Beta;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
 import java.sql.PreparedStatement;
@@ -35,13 +36,10 @@
 import static java.util.Objects.isNull;
 import java.util.Optional;
 import java.util.Set;
-import java.util.logging.Logger;
 import java.util.stream.Collectors;
 import org.joda.time.DateTimeZone;
 import org.joda.time.Interval;
-import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT;
 import static org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE.TSK_TL_EVENT;
-import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME;
 import static org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TL_EVENT_TYPE;
 import static org.sleuthkit.datamodel.CollectionUtils.isNotEmpty;
 import org.sleuthkit.datamodel.SleuthkitCase.CaseDbConnection;
@@ -49,17 +47,14 @@
 import static org.sleuthkit.datamodel.StringUtils.buildCSVString;
 
 /**
- * Provides access to the Timeline features of SleuthkitCase
+ * Provides access to the timeline data in a case database.
  */
 public final class TimelineManager {
 
-	private static final Logger logger = Logger.getLogger(TimelineManager.class.getName());
-
 	/**
-	 * These event types are added to the DB in c++ land, but still need to be
-	 * put in the eventTypeIDMap
+	 * Timeline event types added to the case database when it is created.
 	 */
-	private static final ImmutableList<TimelineEventType> ROOT_BASE_AND_FILESYSTEM_TYPES
+	private static final ImmutableList<TimelineEventType> ROOT_CATEGORY_AND_FILESYSTEM_TYPES
 			= ImmutableList.of(
 					TimelineEventType.ROOT_EVENT_TYPE,
 					TimelineEventType.WEB_ACTIVITY,
@@ -71,61 +66,79 @@ public final class TimelineManager {
 					TimelineEventType.FILE_MODIFIED);
 
 	/**
-	 * These event types are predefined but not added to the DB by the C++ code.
-	 * They are added by the TimelineManager constructor.
+	 * Timeline event types added to the case database by the TimelineManager
+	 * constructor. Adding these types at runtime permits new child types of the
+	 * category types to be defined without modifying the table creation and
+	 * population code in the Sleuth Kit.
 	 */
 	private static final ImmutableList<TimelineEventType> PREDEFINED_EVENT_TYPES
 			= new ImmutableList.Builder<TimelineEventType>()
 					.add(TimelineEventType.CUSTOM_TYPES)
-					.addAll(TimelineEventType.WEB_ACTIVITY.getSubTypes())
-					.addAll(TimelineEventType.MISC_TYPES.getSubTypes())
-					.addAll(TimelineEventType.CUSTOM_TYPES.getSubTypes())
+					.addAll(TimelineEventType.WEB_ACTIVITY.getChildren())
+					.addAll(TimelineEventType.MISC_TYPES.getChildren())
+					.addAll(TimelineEventType.CUSTOM_TYPES.getChildren())
 					.build();
 
-	private final SleuthkitCase sleuthkitCase;
+	private final SleuthkitCase caseDB;
 
 	/**
-	 * map from event type id to TimelineEventType object.
+	 * Mapping of timeline event type IDs to TimelineEventType objects.
 	 */
 	private final Map<Long, TimelineEventType> eventTypeIDMap = new HashMap<>();
 
-	TimelineManager(SleuthkitCase tskCase) throws TskCoreException {
-		sleuthkitCase = tskCase;
+	/**
+	 * Constructs a timeline manager that provides access to the timeline data
+	 * in a case database.
+	 *
+	 * @param caseDB The case database.
+	 *
+	 * @throws TskCoreException If there is an error constructing the timeline
+	 *                          manager.
+	 */
+	TimelineManager(SleuthkitCase caseDB) throws TskCoreException {
+		this.caseDB = caseDB;
 
 		//initialize root and base event types, these are added to the DB in c++ land
-		ROOT_BASE_AND_FILESYSTEM_TYPES.forEach(eventType -> eventTypeIDMap.put(eventType.getTypeID(), eventType));
+		ROOT_CATEGORY_AND_FILESYSTEM_TYPES.forEach(eventType -> eventTypeIDMap.put(eventType.getTypeID(), eventType));
 
 		//initialize the other event types that aren't added in c++
-		sleuthkitCase.acquireSingleUserCaseWriteLock();
-		try (final CaseDbConnection con = sleuthkitCase.getConnection();
+		caseDB.acquireSingleUserCaseWriteLock();
+		try (final CaseDbConnection con = caseDB.getConnection();
 				final Statement statement = con.createStatement()) {
 			for (TimelineEventType type : PREDEFINED_EVENT_TYPES) {
 				con.executeUpdate(statement,
 						insertOrIgnore(" INTO tsk_event_types(event_type_id, display_name, super_type_id) "
 								+ "VALUES( " + type.getTypeID() + ", '"
 								+ escapeSingleQuotes(type.getDisplayName()) + "',"
-								+ type.getSuperType().getTypeID()
+								+ type.getParent().getTypeID()
 								+ ")")); //NON-NLS
 				eventTypeIDMap.put(type.getTypeID(), type);
 			}
 		} catch (SQLException ex) {
-			throw new TskCoreException("Failed to initialize event types.", ex); // NON-NLS
+			throw new TskCoreException("Failed to initialize timeline event types", ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseWriteLock();
+			caseDB.releaseSingleUserCaseWriteLock();
 		}
 	}
 
-	SleuthkitCase getSleuthkitCase() {
-		return sleuthkitCase;
-	}
-
+	/**
+	 * Gets the smallest possible time interval that spans a collection of
+	 * timeline events.
+	 *
+	 * @param eventIDs The event IDs of the events for which to obtain the
+	 *                 spanning interval.
+	 *
+	 * @return The minimal spanning interval, may be null.
+	 *
+	 * @throws TskCoreException If there is an error querying the case database.
+	 */
 	public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreException {
 		if (eventIDs.isEmpty()) {
 			return null;
 		}
-		final String query = "SELECT Min(time) as minTime, Max(time) as maxTime FROM tsk_events WHERE event_id IN (" + buildCSVString(eventIDs) + ")";//NON-NLS
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+		final String query = "SELECT Min(time) as minTime, Max(time) as maxTime FROM tsk_events WHERE event_id IN (" + buildCSVString(eventIDs) + ")"; //NON-NLS
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stmt = con.createStatement();
 				ResultSet results = stmt.executeQuery(query);) {
 			if (results.next()) {
@@ -134,55 +147,22 @@ public Interval getSpanningInterval(Collection<Long> eventIDs) throws TskCoreExc
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error executing get spanning interval query: " + query, ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
 		return null;
 	}
 
 	/**
-	 * Get a count of tagnames applied to the given event ids as a map from
-	 * tagname displayname to count of tag applications
+	 * Gets the smallest possible time interval that spans a collection of
+	 * timeline events.
 	 *
-	 * @param eventIDsWithTags the event ids to get the tag counts map for
+	 * @param timeRange A time range that the events must be within.
+	 * @param filter    A timeline events filter that the events must pass.
+	 * @param timeZone  The time zone for the returned time interval.
 	 *
-	 * @return a map from tagname displayname to count of applications
+	 * @return The minimal spanning interval, may be null.
 	 *
-	 * @throws org.sleuthkit.datamodel.TskCoreException
-	 */
-	public Map<String, Long> getTagCountsByTagName(Set<Long> eventIDsWithTags) throws TskCoreException {
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		String query
-				= "SELECT tag_names.display_name AS display_name, COUNT(distinct tag_id) AS count FROM "
-				+ getAugmentedEventsTablesSQL(true, false, false)
-				+ " JOIN tag_names ON (tsk_events.tag_name_id = tag_names.tag_name_id ) "
-				+ " WHERE event_id IN (" + buildCSVString(eventIDsWithTags) + ") "
-				+ " GROUP BY tag_names.tag_name_id";//NON-NLS
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
-				Statement statement = con.createStatement();
-				ResultSet resultSet = statement.executeQuery(query);) {
-			HashMap<String, Long> counts = new HashMap<>();
-			while (resultSet.next()) {
-				counts.put(resultSet.getString("display_name"), resultSet.getLong("count")); //NON-NLS
-			}
-			return counts;
-		} catch (SQLException ex) {
-			throw new TskCoreException("Failed to get tag counts by tag name with query: " + query, ex); //NON-NLS
-		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
-		}
-	}
-
-	/**
-	 * Get the minimal interval that bounds all the vents that pass the given
-	 * filter.
-	 *
-	 * @param timeRange The timerange that the events must be within.
-	 * @param filter    The filter that the events must pass.
-	 * @param timeZone  The timeZone to return the interval in.
-	 *
-	 * @return The minimal interval that bounds the events.
-	 *
-	 * @throws TskCoreException
+	 * @throws TskCoreException If there is an error querying the case database.
 	 */
 	public Interval getSpanningInterval(Interval timeRange, TimelineFilter.RootFilter filter, DateTimeZone timeZone) throws TskCoreException {
 		long start = timeRange.getStartMillis() / 1000;
@@ -193,8 +173,8 @@ public Interval getSpanningInterval(Interval timeRange, TimelineFilter.RootFilte
 				+ "			 WHERE time <=" + start + " AND " + sqlWhere + ") AS start,"
 				+ "		 (SELECT Min(time)  FROM " + augmentedEventsTablesSQL
 				+ "			 WHERE time >= " + end + " AND " + sqlWhere + ") AS end";//NON-NLS
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stmt = con.createStatement(); //can't use prepared statement because of complex where clause
 				ResultSet results = stmt.executeQuery(queryString);) {
 
@@ -203,23 +183,31 @@ public Interval getSpanningInterval(Interval timeRange, TimelineFilter.RootFilte
 				long end2 = results.getLong("end"); // NON-NLS
 
 				if (end2 == 0) {
-					end2 = getMaxTime();
+					end2 = getMaxEventTime();
 				}
 				return new Interval(start2 * 1000, (end2 + 1) * 1000, timeZone);
 			}
 		} catch (SQLException ex) {
 			throw new TskCoreException("Failed to get MIN time.", ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
 		return null;
 	}
 
+	/**
+	 * Gets the timeline event with a given event ID.
+	 *
+	 * @param eventID An event ID.
+	 *
+	 * @return The timeline event, may be null.
+	 *
+	 * @throws TskCoreException If there is an error querying the case database.
+	 */
 	public TimelineEvent getEventById(long eventID) throws TskCoreException {
-		String sql = "SELECT * FROM  " + getAugmentedEventsTablesSQL(false, false, false) + " WHERE event_id = " + eventID;
-
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+		String sql = "SELECT * FROM  " + getAugmentedEventsTablesSQL(false) + " WHERE event_id = " + eventID;
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stmt = con.createStatement();) {
 			try (ResultSet results = stmt.executeQuery(sql);) {
 				if (results.next()) {
@@ -227,7 +215,7 @@ public TimelineEvent getEventById(long eventID) throws TskCoreException {
 					TimelineEventType type = getEventType(typeID).orElseThrow(() -> newEventTypeMappingException(typeID)); //NON-NLS
 					return new TimelineEvent(eventID,
 							results.getLong("data_source_obj_id"),
-							results.getLong("file_obj_id"),
+							results.getLong("content_obj_id"),
 							results.getLong("artifact_id"),
 							results.getLong("time"),
 							type, results.getString("full_description"),
@@ -238,24 +226,23 @@ public TimelineEvent getEventById(long eventID) throws TskCoreException {
 				}
 			}
 		} catch (SQLException sqlEx) {
-			throw new TskCoreException("exception while querying for event with id = " + eventID, sqlEx); // NON-NLS
+			throw new TskCoreException("Error while executing query " + sql, sqlEx); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
 		return null;
 	}
 
 	/**
-	 * Get the IDs of all the events within the given time range that pass the
-	 * given filter.
+	 * Gets the event IDs of the timeline events within a given time range that
+	 * pass a given timeline events filter.
 	 *
-	 * @param timeRange The Interval that all returned events must be within.
-	 * @param filter    The Filter that all returned events must pass.
+	 * @param timeRange The time range that the events must be within.
+	 * @param filter    The timeline events filter that the events must pass.
 	 *
-	 * @return A List of event ids, sorted by timestamp of the corresponding
-	 *         event..
+	 * @return A list of event IDs ordered by event time.
 	 *
-	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 * @throws TskCoreException If there is an error querying the case database.
 	 */
 	public List<Long> getEventIDs(Interval timeRange, TimelineFilter.RootFilter filter) throws TskCoreException {
 		Long startTime = timeRange.getStartMillis() / 1000;
@@ -269,8 +256,8 @@ public List<Long> getEventIDs(Interval timeRange, TimelineFilter.RootFilter filt
 
 		String query = "SELECT tsk_events.event_id AS event_id FROM " + getAugmentedEventsTablesSQL(filter)
 				+ " WHERE time >=  " + startTime + " AND time <" + endTime + " AND " + getSQLWhere(filter) + " ORDER BY time ASC"; // NON-NLS
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stmt = con.createStatement();
 				ResultSet results = stmt.executeQuery(query);) {
 			while (results.next()) {
@@ -278,119 +265,91 @@ public List<Long> getEventIDs(Interval timeRange, TimelineFilter.RootFilter filt
 			}
 
 		} catch (SQLException sqlEx) {
-			throw new TskCoreException("failed to execute query for event ids in range", sqlEx); // NON-NLS
+			throw new TskCoreException("Error while executing query " + query, sqlEx); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
 
 		return resultIDs;
 	}
 
-	
 	/**
-	 * Get a the hashset names for hash sets with hits.
+	 * Gets the maximum timeline event time in the case database.
 	 *
-	 * @return A set of hashset names which have hits.
+	 * @return The maximum timeline event time in seconds since the UNIX epoch,
+	 *         or -1 if there are no timeline events in the case database.
 	 *
-	 * @throws TskCoreException
+	 * @throws TskCoreException If there is an error querying the case database.
 	 */
-	public Set< String> getHashSetNames() throws TskCoreException {
-		Set< String> hashSets = new HashSet<>();
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-
-		String query = "SELECT DISTINCT value_text AS hash_set_name FROM blackboard_artifacts "
-				+ " JOIN blackboard_attributes ON (blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id) "
-				+ " JOIN blackboard_artifact_types ON( blackboard_artifacts.artifact_type_id = blackboard_artifact_types.artifact_type_id) "
-				+ " WHERE blackboard_artifact_types.artifact_type_id = " + BlackboardArtifact.ARTIFACT_TYPE.TSK_HASHSET_HIT.getTypeID()
-				+ " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID(); //NON-NLS
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
-				Statement stms = con.createStatement();
-				ResultSet results = stms.executeQuery(query);) {
-			while (results.next()) {
-				String hashSetName = results.getString("hash_set_name"); //NON-NLS
-				hashSets.add(hashSetName);
-			}
-		} catch (SQLException ex) {
-			throw new TskCoreException("Failed to get hash sets.", ex); // NON-NLS
-		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
-		}
-		return Collections.unmodifiableSet(hashSets);
-	}
-
-	/**
-	 * @return maximum time in seconds from unix epoch
-	 *
-	 * @throws org.sleuthkit.datamodel.TskCoreException
-	 */
-	public Long getMaxTime() throws TskCoreException {
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+	public Long getMaxEventTime() throws TskCoreException {
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stms = con.createStatement();
 				ResultSet results = stms.executeQuery(STATEMENTS.GET_MAX_TIME.getSQL());) {
 			if (results.next()) {
 				return results.getLong("max"); // NON-NLS
 			}
 		} catch (SQLException ex) {
-			throw new TskCoreException("Failed to get MAX time.", ex); // NON-NLS
+			throw new TskCoreException("Error while executing query " + STATEMENTS.GET_MAX_TIME.getSQL(), ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
 		return -1l;
 	}
 
 	/**
-	 * @return maximum time in seconds from unix epoch
+	 * Gets the minimum timeline event time in the case database.
+	 *
+	 * @return The minimum timeline event time in seconds since the UNIX epoch,
+	 *         or -1 if there are no timeline events in the case database.
 	 *
-	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 * @throws TskCoreException If there is an error querying the case database.
 	 */
-	public Long getMinTime() throws TskCoreException {
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+	public Long getMinEventTime() throws TskCoreException {
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stms = con.createStatement();
 				ResultSet results = stms.executeQuery(STATEMENTS.GET_MIN_TIME.getSQL());) {
 			if (results.next()) {
 				return results.getLong("min"); // NON-NLS
 			}
 		} catch (SQLException ex) {
-			throw new TskCoreException("Failed to get MIN time.", ex); // NON-NLS
+			throw new TskCoreException("Error while executing query " + STATEMENTS.GET_MAX_TIME.getSQL(), ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
 		return -1l;
 	}
 
 	/**
-	 * Get an TimelineEventType object given it's ID.
+	 * Gets the timeline event type with a given event type ID.
 	 *
-	 * @param eventTypeID The ID of the event type to get.
+	 * @param eventTypeID An event type ID.
 	 *
-	 * @return An Optional containing the TimelineEventType, or an empty Optional if no
-         TimelineEventType with the given ID was found.
+	 * @return The timeline event type in an Optional object, may be empty if
+	 *         the event type is not found.
 	 */
 	public Optional<TimelineEventType> getEventType(long eventTypeID) {
 		return Optional.ofNullable(eventTypeIDMap.get(eventTypeID));
 	}
 
 	/**
-	 * Get a list of all the EventTypes.
+	 * Gets all of the timeline event types in the case database.
 	 *
-	 * @return A list of all the eventTypes.
+	 * @return A list of timeline event types.
 	 */
 	public ImmutableList<TimelineEventType> getEventTypes() {
 		return ImmutableList.copyOf(eventTypeIDMap.values());
 	}
 
 	private String insertOrIgnore(String query) {
-		switch (sleuthkitCase.getDatabaseType()) {
+		switch (caseDB.getDatabaseType()) {
 			case POSTGRESQL:
 				return " INSERT " + query + " ON CONFLICT DO NOTHING "; //NON-NLS
 			case SQLITE:
 				return " INSERT OR IGNORE " + query; //NON-NLS
 			default:
-				throw new UnsupportedOperationException("Unsupported DB type: " + sleuthkitCase.getDatabaseType().name());
+				throw new UnsupportedOperationException("Unsupported DB type: " + caseDB.getDatabaseType().name());
 		}
 	}
 
@@ -414,15 +373,14 @@ String getSQL() {
 	}
 
 	/**
-	 * Get a List of event IDs for the events that are derived from the given
-	 * artifact.
+	 * Gets a list of event IDs for the timeline events that have a given
+	 * artifact as the event source.
 	 *
-	 * @param artifact The BlackboardArtifact to get derived event IDs for.
+	 * @param artifact An artifact.
 	 *
-	 * @return A List of event IDs for the events that are derived from the
-	 *         given artifact.
+	 * @return The list of event IDs.
 	 *
-	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 * @throws TskCoreException If there is an error querying the case database.
 	 */
 	public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws TskCoreException {
 		ArrayList<Long> eventIDs = new ArrayList<>();
@@ -431,8 +389,8 @@ public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws Tsk
 				= "SELECT event_id FROM tsk_events "
 				+ " LEFT JOIN tsk_event_descriptions on ( tsk_events.event_description_id = tsk_event_descriptions.event_description_id ) "
 				+ " WHERE artifact_id = " + artifact.getArtifactID();
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stmt = con.createStatement();
 				ResultSet results = stmt.executeQuery(query);) {
 			while (results.next()) {
@@ -441,30 +399,31 @@ public List<Long> getEventIDsForArtifact(BlackboardArtifact artifact) throws Tsk
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error executing getEventIDsForArtifact query.", ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
 		return eventIDs;
 	}
 
 	/**
-	 * Get a Set of event IDs for the events that are derived from the given
-	 * file.
+	 * Gets a list of event IDs for the timeline events that have a given
+	 * content as the event source.
 	 *
-	 * @param file                    The File / data source to get derived
-	 *                                event IDs for.
+	 * @param content                 The content.
 	 * @param includeDerivedArtifacts If true, also get event IDs for events
-	 *                                derived from artifacts derived form this
-	 *                                file. If false, only gets events derived
-	 *                                directly from this file (file system
-	 *                                timestamps).
+	 *                                where the event source is an artifact that
+	 *                                has the given content as its source.
 	 *
-	 * @return A Set of event IDs for the events that are derived from the given
-	 *         file.
+	 * @return The list of event IDs.
 	 *
-	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 * @throws TskCoreException If there is an error querying the case database.
 	 */
-	public Set<Long> getEventIDsForFile(Content file, boolean includeDerivedArtifacts) throws TskCoreException {
-		return getEventAndDescriptionIDs(file.getId(), includeDerivedArtifacts).keySet();
+	public Set<Long> getEventIDsForContent(Content content, boolean includeDerivedArtifacts) throws TskCoreException {
+		caseDB.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection conn = caseDB.getConnection()) {
+			return getEventAndDescriptionIDs(conn, content.getId(), includeDerivedArtifacts).keySet();
+		} finally {
+			caseDB.releaseSingleUserCaseWriteLock();
+		}
 	}
 
 	/**
@@ -489,7 +448,7 @@ private long addEventDescription(long dataSourceObjId, long fileObjId, Long arti
 			boolean hasHashHits, boolean tagged, CaseDbConnection connection) throws TskCoreException {
 		String insertDescriptionSql
 				= "INSERT INTO tsk_event_descriptions ( "
-				+ "data_source_obj_id, file_obj_id, artifact_id,  "
+				+ "data_source_obj_id, content_obj_id, artifact_id,  "
 				+ " full_description, med_description, short_description, "
 				+ " hash_hit, tagged "
 				+ " ) VALUES ("
@@ -503,7 +462,7 @@ private long addEventDescription(long dataSourceObjId, long fileObjId, Long arti
 				+ booleanToInt(tagged)
 				+ " )";
 
-		sleuthkitCase.acquireSingleUserCaseWriteLock();
+		caseDB.acquireSingleUserCaseWriteLock();
 		try (Statement insertDescriptionStmt = connection.createStatement()) {
 			connection.executeUpdate(insertDescriptionStmt, insertDescriptionSql, PreparedStatement.RETURN_GENERATED_KEYS);
 			try (ResultSet generatedKeys = insertDescriptionStmt.getGeneratedKeys()) {
@@ -513,11 +472,11 @@ private long addEventDescription(long dataSourceObjId, long fileObjId, Long arti
 		} catch (SQLException ex) {
 			throw new TskCoreException("Failed to insert event description.", ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseWriteLock();
+			caseDB.releaseSingleUserCaseWriteLock();
 		}
 	}
 
-	Collection<TimelineEvent> addAbstractFileEvents(AbstractFile file, CaseDbConnection connection) throws TskCoreException {
+	Collection<TimelineEvent> addEventsForNewFile(AbstractFile file, CaseDbConnection connection) throws TskCoreException {
 		//gather time stamps into map
 		Map<TimelineEventType, Long> timeMap = ImmutableMap.of(TimelineEventType.FILE_CREATED, file.getCrtime(),
 				TimelineEventType.FILE_ACCESSED, file.getAtime(),
@@ -525,21 +484,17 @@ Collection<TimelineEvent> addAbstractFileEvents(AbstractFile file, CaseDbConnect
 				TimelineEventType.FILE_MODIFIED, file.getMtime());
 
 		/*
-		 * If there are no legitimate ( greater than zero ) time stamps ( eg,
-		 * logical/local files) skip the rest of the event generation: this
-		 * should result in dropping logical files, since they do not have
-		 * legitimate time stamps.
+		 * If there are no legitimate ( greater than zero ) time stamps skip the
+		 * rest of the event generation.
 		 */
 		if (Collections.max(timeMap.values()) <= 0) {
 			return Collections.emptySet();
 		}
 
-		boolean hashHashHits = CollectionUtils.isNotEmpty(file.getHashSetNames());
-		boolean hasTags = CollectionUtils.isNotEmpty(sleuthkitCase.getContentTagsByContent(file));
 		String description = file.getParentPath() + file.getName();
 		long fileObjId = file.getId();
 		Set<TimelineEvent> events = new HashSet<>();
-		sleuthkitCase.acquireSingleUserCaseWriteLock();
+		caseDB.acquireSingleUserCaseWriteLock();
 		try {
 			long descriptionID = addEventDescription(file.getDataSourceObjectId(), fileObjId, null,
 					description, null, null, false, false, connection);
@@ -550,17 +505,22 @@ Collection<TimelineEvent> addAbstractFileEvents(AbstractFile file, CaseDbConnect
 					TimelineEventType type = timeEntry.getKey();
 					long eventID = addEventWithExistingDescription(time, type, descriptionID, connection);
 
+					/*
+					 * Last two flags indicating hasTags and hasHashHits are
+					 * both set to false with the assumption that this is not
+					 * possible for a new file. See JIRA-5407
+					 */
 					events.add(new TimelineEvent(eventID, descriptionID, fileObjId, null, time, type,
-							description, null, null, hashHashHits, hasTags));
+							description, null, null, false, false));
 				}
 			}
 
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseWriteLock();
+			caseDB.releaseSingleUserCaseWriteLock();
 		}
 		events.stream()
 				.map(TimelineEventAddedEvent::new)
-				.forEach(sleuthkitCase::fireTSKEvent);
+				.forEach(caseDB::fireTSKEvent);
 
 		return events;
 	}
@@ -596,8 +556,8 @@ Set<TimelineEvent> addArtifactEvents(BlackboardArtifact artifact) throws TskCore
 				eventType = eventTypeIDMap.getOrDefault(eventTypeID, TimelineEventType.OTHER);
 			}
 
-			// @@@ This casting is risky if we change class hierarchy, but was expediant.  Should move parsing to another class
-			addArtifactEvent(((TimelineEventArtifactTypeImpl)TimelineEventType.OTHER)::makeEventDescription, eventType, artifact)
+			// @@@ This casting is risky if we change class hierarchy, but was expedient.  Should move parsing to another class
+			addArtifactEvent(((TimelineEventArtifactTypeImpl) TimelineEventType.OTHER)::makeEventDescription, eventType, artifact)
 					.ifPresent(newEvents::add);
 		} else {
 			/*
@@ -617,7 +577,7 @@ Set<TimelineEvent> addArtifactEvents(BlackboardArtifact artifact) throws TskCore
 		}
 		newEvents.stream()
 				.map(TimelineEventAddedEvent::new)
-				.forEach(sleuthkitCase::fireTSKEvent);
+				.forEach(caseDB::fireTSKEvent);
 		return newEvents;
 	}
 
@@ -649,24 +609,24 @@ private Optional<TimelineEvent> addArtifactEvent(TSKCoreCheckedFunction<Blackboa
 		if (time <= 0) {
 			return Optional.empty();
 		}
-		String fullDescription = eventPayload.getFullDescription();
-		String medDescription = eventPayload.getMediumDescription();
-		String shortDescription = eventPayload.getShortDescription();
+		String fullDescription = eventPayload.getDescription(TimelineLevelOfDetail.HIGH);
+		String medDescription = eventPayload.getDescription(TimelineLevelOfDetail.MEDIUM);
+		String shortDescription = eventPayload.getDescription(TimelineLevelOfDetail.LOW);
 		long artifactID = artifact.getArtifactID();
 		long fileObjId = artifact.getObjectID();
 		long dataSourceObjectID = artifact.getDataSourceObjectID();
 
-		AbstractFile file = sleuthkitCase.getAbstractFileById(fileObjId);
+		AbstractFile file = caseDB.getAbstractFileById(fileObjId);
 		boolean hasHashHits = false;
 		// file will be null if source was data source or some non-file
 		if (file != null) {
 			hasHashHits = isNotEmpty(file.getHashSetNames());
 		}
-		boolean tagged = isNotEmpty(sleuthkitCase.getBlackboardArtifactTagsByArtifact(artifact));
+		boolean tagged = isNotEmpty(caseDB.getBlackboardArtifactTagsByArtifact(artifact));
 
 		TimelineEvent event;
-		sleuthkitCase.acquireSingleUserCaseWriteLock();
-		try (CaseDbConnection connection = getSleuthkitCase().getConnection();) {
+		caseDB.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection connection = caseDB.getConnection();) {
 
 			long descriptionID = addEventDescription(dataSourceObjectID, fileObjId, artifactID,
 					fullDescription, medDescription, shortDescription,
@@ -679,7 +639,7 @@ private Optional<TimelineEvent> addArtifactEvent(TSKCoreCheckedFunction<Blackboa
 					hasHashHits, tagged);
 
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseWriteLock();
+			caseDB.releaseSingleUserCaseWriteLock();
 		}
 		return Optional.of(event);
 	}
@@ -689,7 +649,7 @@ private long addEventWithExistingDescription(Long time, TimelineEventType type,
 				= "INSERT INTO tsk_events ( event_type_id, event_description_id , time) "
 				+ " VALUES (" + type.getTypeID() + ", " + descriptionID + ", " + time + ")";
 
-		sleuthkitCase.acquireSingleUserCaseWriteLock();
+		caseDB.acquireSingleUserCaseWriteLock();
 		try (Statement insertRowStmt = connection.createStatement();) {
 			connection.executeUpdate(insertRowStmt, insertEventSql, PreparedStatement.RETURN_GENERATED_KEYS);
 
@@ -700,7 +660,7 @@ private long addEventWithExistingDescription(Long time, TimelineEventType type,
 		} catch (SQLException ex) {
 			throw new TskCoreException("Failed to insert event for existing description.", ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseWriteLock();
+			caseDB.releaseSingleUserCaseWriteLock();
 		}
 	}
 
@@ -708,123 +668,189 @@ static private String quotePreservingNull(String value) {
 		return isNull(value) ? " NULL " : "'" + escapeSingleQuotes(value) + "'";//NON-NLS
 	}
 
+	private Map<Long, Long> getEventAndDescriptionIDs(CaseDbConnection conn, long contentObjID, boolean includeArtifacts) throws TskCoreException {
+		return getEventAndDescriptionIDsHelper(conn, contentObjID, (includeArtifacts ? "" : " AND artifact_id IS NULL"));
+	}
+
+	private Map<Long, Long> getEventAndDescriptionIDs(CaseDbConnection conn, long contentObjID, Long artifactID) throws TskCoreException {
+		return getEventAndDescriptionIDsHelper(conn, contentObjID, " AND artifact_id = " + artifactID);
+	}
+
+	private Map<Long, Long> getEventAndDescriptionIDsHelper(CaseDbConnection con, long fileObjID, String artifactClause) throws TskCoreException {
+		//map from event_id to the event_description_id for that event.
+		Map<Long, Long> eventIDToDescriptionIDs = new HashMap<>();
+		String sql = "SELECT event_id, tsk_events.event_description_id"
+				+ " FROM tsk_events "
+				+ " LEFT JOIN tsk_event_descriptions ON ( tsk_events.event_description_id = tsk_event_descriptions.event_description_id )"
+				+ " WHERE content_obj_id = " + fileObjID
+				+ artifactClause;
+		try (Statement selectStmt = con.createStatement(); ResultSet executeQuery = selectStmt.executeQuery(sql);) {
+			while (executeQuery.next()) {
+				eventIDToDescriptionIDs.put(executeQuery.getLong("event_id"), executeQuery.getLong("event_description_id")); //NON-NLS
+			}
+		} catch (SQLException ex) {
+			throw new TskCoreException("Error getting event description ids for object id = " + fileObjID, ex);
+		}
+		return eventIDToDescriptionIDs;
+	}
+
 	/**
-	 * Get events that are associated with the file
+	 * Finds all of the timeline events directly associated with a given content
+	 * and marks them as having an event source that is tagged. This does not
+	 * include timeline events where the event source is an artifact, even if
+	 * the artifact source is the tagged content.
 	 *
-	 * @param fileObjID
-	 * @param includeArtifacts true if results should also include events from
-	 *                         artifacts associated with the file.
+	 * @param content The content.
 	 *
-	 * @return A map from event_id to event_decsription_id.
+	 * @return The event IDs of the events that were marked as having a tagged
+	 *         event source.
 	 *
-	 * @throws TskCoreException
+	 * @throws TskCoreException If there is an error updating the case database.
+	 *
+	 * WARNING: THIS IS A BETA VERSION OF THIS METHOD, SUBJECT TO CHANGE AT ANY
+	 * TIME.
 	 */
-	private Map<Long, Long> getEventAndDescriptionIDs(long fileObjID, boolean includeArtifacts) throws TskCoreException {
-		return getEventAndDescriptionIDsHelper(fileObjID, (includeArtifacts ? "" : " AND artifact_id IS NULL"));
+	@Beta
+	public Set<Long> updateEventsForContentTagAdded(Content content) throws TskCoreException {
+		caseDB.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection conn = caseDB.getConnection()) {
+			Map<Long, Long> eventIDs = getEventAndDescriptionIDs(conn, content.getId(), false);
+			updateEventSourceTaggedFlag(conn, eventIDs.values(), 1);
+			return eventIDs.keySet();
+		} finally {
+			caseDB.releaseSingleUserCaseWriteLock();
+		}
 	}
 
 	/**
-	 * Get events that match both the file and artifact IDs
+	 * Finds all of the timeline events directly associated with a given content
+	 * and marks them as not having an event source that is tagged, if and only
+	 * if there are no other tags on the content. The inspection of events does
+	 * not include events where the event source is an artifact, even if the
+	 * artifact source is the content from which the tag was removed.
 	 *
-	 * @param fileObjID
-	 * @param artifactID
+	 * @param content The content.
 	 *
-	 * @return A map from event_id to event_decsription_id.
+	 * @return The event IDs of the events that were marked as not having a
+	 *         tagged event source.
 	 *
-	 * @throws TskCoreException
+	 * @throws TskCoreException If there is an error updating the case database.
+	 *
+	 * WARNING: THIS IS A BETA VERSION OF THIS METHOD, SUBJECT TO CHANGE AT ANY
+	 * TIME.
 	 */
-	private Map<Long, Long> getEventAndDescriptionIDs(long fileObjID, Long artifactID) throws TskCoreException {
-		return getEventAndDescriptionIDsHelper(fileObjID, " AND artifact_id = " + artifactID);
+	@Beta
+	public Set<Long> updateEventsForContentTagDeleted(Content content) throws TskCoreException {
+		caseDB.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection conn = caseDB.getConnection()) {
+			if (caseDB.getContentTagsByContent(content).isEmpty()) {
+				Map<Long, Long> eventIDs = getEventAndDescriptionIDs(conn, content.getId(), false);
+				updateEventSourceTaggedFlag(conn, eventIDs.values(), 0);
+				return eventIDs.keySet();
+			} else {
+				return Collections.emptySet();
+			}
+		} finally {
+			caseDB.releaseSingleUserCaseWriteLock();
+		}
 	}
 
 	/**
-	 * Get a map containging event_id and their corresponding
-	 * event_description_ids.
+	 * Finds all of the timeline events directly associated with a given
+	 * artifact and marks them as having an event source that is tagged.
 	 *
-	 * @param fileObjID      get event Ids for events that are derived from the
-	 *                       file with this id.
-	 * @param artifactClause SQL clause that clients can pass in to filter the
-	 *                       returned ids.
+	 * @param artifact The artifact.
 	 *
-	 * @return A map from event_id to event_decsription_id.
+	 * @return The event IDs of the events that were marked as having a tagged
+	 *         event source.
 	 *
-	 * @throws TskCoreException
+	 * @throws TskCoreException If there is an error updating the case database.
 	 */
-	private Map<Long, Long> getEventAndDescriptionIDsHelper(long fileObjID, String artifactClause) throws TskCoreException {
-		//map from event_id to the event_description_id for that event.
-		Map<Long, Long> eventIDToDescriptionIDs = new HashMap<>();
-		String sql = "SELECT event_id, tsk_events.event_description_id"
-				+ " FROM tsk_events "
-				+ " LEFT JOIN tsk_event_descriptions ON ( tsk_events.event_description_id = tsk_event_descriptions.event_description_id )"
-				+ " WHERE file_obj_id = " + fileObjID
-				+ artifactClause;
-
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
-				Statement selectStmt = con.createStatement();
-				ResultSet executeQuery = selectStmt.executeQuery(sql);) {
-			while (executeQuery.next()) {
-				eventIDToDescriptionIDs.put(executeQuery.getLong("event_id"), executeQuery.getLong("event_description_id")); //NON-NLS
-			}
-		} catch (SQLException ex) {
-			throw new TskCoreException("Error getting event description ids for object id = " + fileObjID, ex);
+	public Set<Long> updateEventsForArtifactTagAdded(BlackboardArtifact artifact) throws TskCoreException {
+		caseDB.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection conn = caseDB.getConnection()) {
+			Map<Long, Long> eventIDs = getEventAndDescriptionIDs(conn, artifact.getObjectID(), artifact.getArtifactID());
+			updateEventSourceTaggedFlag(conn, eventIDs.values(), 1);
+			return eventIDs.keySet();
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseWriteLock();
 		}
-		return eventIDToDescriptionIDs;
 	}
 
 	/**
-	 * Set any events with the given object and artifact ids as tagged.
+	 * Finds all of the timeline events directly associated with a given
+	 * artifact and marks them as not having an event source that is tagged, if
+	 * and only if there are no other tags on the artifact.
 	 *
-	 * @param fileObjId  the obj_id that this tag applies to, the id of the
-	 *                   content that the artifact is derived from for artifact
-	 *                   tags
-	 * @param artifactID the artifact_id that this tag applies to, or null if
-	 *                   this is a content tag
-	 * @param tagged     true to mark the matching events tagged, false to mark
-	 *                   them as untagged
+	 * @param artifact The artifact.
 	 *
-	 * @return the event ids that match the object/artifact pair.
+	 * @return The event IDs of the events that were marked as not having a
+	 *         tagged event source.
 	 *
-	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 * @throws TskCoreException If there is an error updating the case database.
 	 */
-	public Set<Long> setEventsTagged(long fileObjId, Long artifactID, boolean tagged) throws TskCoreException {
-		sleuthkitCase.acquireSingleUserCaseWriteLock();
-		Map<Long, Long> eventIDs;  // map from event_ids to event_description_ids
-		if (Objects.isNull(artifactID)) {
-			eventIDs = getEventAndDescriptionIDs(fileObjId, false);
-		} else {
-			eventIDs = getEventAndDescriptionIDs(fileObjId, artifactID);
+	public Set<Long> updateEventsForArtifactTagDeleted(BlackboardArtifact artifact) throws TskCoreException {
+		caseDB.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection conn = caseDB.getConnection()) {
+			if (caseDB.getBlackboardArtifactTagsByArtifact(artifact).isEmpty()) {
+				Map<Long, Long> eventIDs = getEventAndDescriptionIDs(conn, artifact.getObjectID(), artifact.getArtifactID());
+				updateEventSourceTaggedFlag(conn, eventIDs.values(), 0);
+				return eventIDs.keySet();
+			} else {
+				return Collections.emptySet();
+			}
+		} finally {
+			caseDB.releaseSingleUserCaseWriteLock();
 		}
+	}
 
-		//update tagged state for all event with selected ids
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
-				Statement updateStatement = con.createStatement();) {
-			updateStatement.executeUpdate("UPDATE tsk_event_descriptions SET tagged = " + booleanToInt(tagged)
-					+ " WHERE event_description_id IN (" + buildCSVString(eventIDs.values()) + ")"); //NON-NLS
+	private void updateEventSourceTaggedFlag(CaseDbConnection conn, Collection<Long> eventDescriptionIDs, int flagValue) throws TskCoreException {
+		if (eventDescriptionIDs.isEmpty()) {
+			return;
+		}
+		
+		String sql = "UPDATE tsk_event_descriptions SET tagged = " + flagValue + " WHERE event_description_id IN (" + buildCSVString(eventDescriptionIDs) + ")"; //NON-NLS
+		try (Statement updateStatement = conn.createStatement()) {
+			updateStatement.executeUpdate(sql);
 		} catch (SQLException ex) {
-			throw new TskCoreException("Error marking events tagged", ex);//NON-NLS
-		} finally {
-			sleuthkitCase.releaseSingleUserCaseWriteLock();
+			throw new TskCoreException("Error marking content events tagged: " + sql, ex);//NON-NLS
 		}
-		return eventIDs.keySet();
 	}
 
-	public Set<Long> setEventsHashed(long fileObjdId, boolean hashHits) throws TskCoreException {
-		sleuthkitCase.acquireSingleUserCaseWriteLock();
-		Map<Long, Long> eventIDs = getEventAndDescriptionIDs(fileObjdId, true);
-
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
-				Statement updateStatement = con.createStatement();) {
-			updateStatement.executeUpdate("UPDATE tsk_event_descriptions SET hash_hit = " + booleanToInt(hashHits) //NON-NLS
-					+ " WHERE event_description_id IN (" + buildCSVString(eventIDs.values()) + ")"); //NON-NLS
+	/**
+	 * Finds all of the timeline events associated directly or indirectly with a
+	 * given content and marks them as having an event source that has a hash
+	 * set hit. This includes both the events that have the content as their
+	 * event source and the events for which the content is the source content
+	 * for the source artifact of the event.
+	 *
+	 * @param content The content.
+	 *
+	 * @return The event IDs of the events that were marked as having an event
+	 *         source with a hash set hit.
+	 *
+	 * @throws TskCoreException If there is an error updating the case database.
+	 */
+	public Set<Long> updateEventsForHashSetHit(Content content) throws TskCoreException {
+		caseDB.acquireSingleUserCaseWriteLock();
+		try (CaseDbConnection con = caseDB.getConnection(); Statement updateStatement = con.createStatement();) {
+			Map<Long, Long> eventIDs = getEventAndDescriptionIDs(con, content.getId(), true);
+			if (! eventIDs.isEmpty()) {
+				String sql = "UPDATE tsk_event_descriptions SET hash_hit = 1" + " WHERE event_description_id IN (" + buildCSVString(eventIDs.values()) + ")"; //NON-NLS
+				try {
+					updateStatement.executeUpdate(sql); //NON-NLS
+					return eventIDs.keySet();
+				} catch (SQLException ex) {
+					throw new TskCoreException("Error setting hash_hit of events.", ex);//NON-NLS
+				}
+			} else {
+				return eventIDs.keySet();
+			}
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error setting hash_hit of events.", ex);//NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseWriteLock();
+			caseDB.releaseSingleUserCaseWriteLock();
 		}
-		return eventIDs.keySet();
 	}
 
 	void rollBackTransaction(SleuthkitCase.CaseDbTransaction trans) throws TskCoreException {
@@ -832,35 +858,36 @@ void rollBackTransaction(SleuthkitCase.CaseDbTransaction trans) throws TskCoreEx
 	}
 
 	/**
-	 * Count all the events with the given options and return a map organizing
-	 * the counts in a hierarchy from date > eventtype> count
-	 *
-	 * @param startTime events before this time will be excluded (seconds from
-	 *                  unix epoch)
-	 * @param endTime   events at or after this time will be excluded (seconds
-	 *                  from unix epoch)
-	 * @param filter    only events that pass this filter will be counted
-	 * @param zoomLevel only events of this type or a subtype will be counted
-	 *                  and the counts will be organized into bins for each of
-	 *                  the subtypes of the given event type
-	 *
-	 * @return a map organizing the counts in a hierarchy from date > eventtype>
-	 *         count
-	 *
-	 * @throws org.sleuthkit.datamodel.TskCoreException
+	 * Counts the timeline events that satisfy the given conditions.
+	 *
+	 * @param startTime         Events that occurred before this time are not
+	 *                          counted (units: seconds from UNIX epoch)
+	 * @param endTime           Events that occurred at or after this time are
+	 *                          not counted (seconds from unix epoch)
+	 * @param filter            Events that fall within the specified time range
+	 *                          are only counted if they pass this filter.
+	 * @param typeHierachyLevel Events that fall within the specified time range
+	 *                          and pass the specified filter are only counted
+	 *                          if their types are at the specified level of the
+	 *                          event type hierarchy.
+	 *
+	 * @return The event counts for each event type at the specified level in
+	 *         the event types hierarchy.
+	 *
+	 * @throws TskCoreException If there is an error querying the case database.
 	 */
-	public Map<TimelineEventType, Long> countEventsByType(Long startTime, final Long endTime, TimelineFilter.RootFilter filter, TimelineEventType.TypeLevel zoomLevel) throws TskCoreException {
+	public Map<TimelineEventType, Long> countEventsByType(Long startTime, Long endTime, TimelineFilter.RootFilter filter, TimelineEventType.HierarchyLevel typeHierachyLevel) throws TskCoreException {
 		long adjustedEndTime = Objects.equals(startTime, endTime) ? endTime + 1 : endTime;
 		//do we want the base or subtype column of the databse
-		String typeColumn = typeColumnHelper(TimelineEventType.TypeLevel.SUB_TYPE.equals(zoomLevel));
+		String typeColumn = typeColumnHelper(TimelineEventType.HierarchyLevel.EVENT.equals(typeHierachyLevel));
 
 		String queryString = "SELECT count(DISTINCT tsk_events.event_id) AS count, " + typeColumn//NON-NLS
 				+ " FROM " + getAugmentedEventsTablesSQL(filter)//NON-NLS
 				+ " WHERE time >= " + startTime + " AND time < " + adjustedEndTime + " AND " + getSQLWhere(filter) // NON-NLS
 				+ " GROUP BY " + typeColumn; // NON-NLS
 
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stmt = con.createStatement();
 				ResultSet results = stmt.executeQuery(queryString);) {
 			Map<TimelineEventType, Long> typeMap = new HashMap<>();
@@ -875,7 +902,7 @@ public Map<TimelineEventType, Long> countEventsByType(Long startTime, final Long
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting count of events from db: " + queryString, ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
 	}
 
@@ -897,16 +924,10 @@ private static TskCoreException newEventTypeMappingException(int eventTypeID) {
 	 *         columns required by the filters.
 	 */
 	static private String getAugmentedEventsTablesSQL(TimelineFilter.RootFilter filter) {
-		TimelineFilter.TagsFilter tagsFilter = filter.getTagsFilter();
-		boolean needsTags = tagsFilter != null && tagsFilter.hasSubFilters();
-
-		TimelineFilter.HashHitsFilter hashHitsFilter = filter.getHashHitsFilter();
-		boolean needsHashSets = hashHitsFilter != null && hashHitsFilter.hasSubFilters();
-
 		TimelineFilter.FileTypesFilter fileTypesFitler = filter.getFileTypesFilter();
 		boolean needsMimeTypes = fileTypesFitler != null && fileTypesFitler.hasSubFilters();
 
-		return getAugmentedEventsTablesSQL(needsTags, needsHashSets, needsMimeTypes);
+		return getAugmentedEventsTablesSQL(needsMimeTypes);
 	}
 
 	/**
@@ -917,46 +938,34 @@ static private String getAugmentedEventsTablesSQL(TimelineFilter.RootFilter filt
 	 * to the tsk_files table for mime_types if necessary. If all flags are
 	 * false, just return "events".
 	 *
-	 * @param needTags      True if the Sfilters require joining to the tags
-	 *                      tables.
-	 * @param needHashSets  True if the filters require joining to the hash set
-	 *                      sub query.
 	 * @param needMimeTypes True if the filters require joining to the tsk_files
 	 *                      table for the mime_type.
 	 *
-	 * @return An SQL expresion that produces an events table augmented with the
-	 *         columns required by the filters.
+	 * @return An SQL expression that produces an events table augmented with
+	 *         the columns required by the filters.
 	 */
-	static private String getAugmentedEventsTablesSQL(boolean needTags, boolean needHashSets, boolean needMimeTypes) {
-		return "( select event_id, time, tsk_event_descriptions.data_source_obj_id, file_obj_id, artifact_id, "
+	static private String getAugmentedEventsTablesSQL(boolean needMimeTypes) {
+		/*
+		 * Regarding the timeline event tables schema, note that several columns
+		 * in the tsk_event_descriptions table seem, at first glance, to be
+		 * attributes of events rather than their descriptions and would appear
+		 * to belong in tsk_events table instead. The rationale for putting the
+		 * data source object ID, content object ID, artifact ID and the flags
+		 * indicating whether or not the event source has a hash set hit or is
+		 * tagged were motivated by the fact that these attributes are identical
+		 * for each event in a set of file system file MAC time events. The
+		 * decision was made to avoid duplication and save space by placing this
+		 * data in the tsk_event_descriptions table.
+		 */
+		return "( SELECT event_id, time, tsk_event_descriptions.data_source_obj_id, content_obj_id, artifact_id, "
 				+ " full_description, med_description, short_description, tsk_events.event_type_id, super_type_id,"
 				+ " hash_hit, tagged "
-				+ (needTags ? ", tag_name_id, tag_id" : "")
-				+ (needHashSets ? ", hash_set_name" : "")
 				+ (needMimeTypes ? ", mime_type" : "")
 				+ " FROM tsk_events "
 				+ " JOIN tsk_event_descriptions ON ( tsk_event_descriptions.event_description_id = tsk_events.event_description_id)"
 				+ " JOIN tsk_event_types ON (tsk_events.event_type_id = tsk_event_types.event_type_id )  "
-				+ (needTags
-						? ("LEFT OUTER JOIN ("
-						+ "		SELECT  event_description_id, tag_name_id, tag_id "
-						+ "			FROM tsk_event_descriptions LEFT OUTER JOIN content_tags ON (content_tags.obj_id = tsk_event_descriptions.file_obj_id) "
-						+ "	UNION ALL "
-						+ "		SELECT  event_description_id,  tag_name_id, tag_id "
-						+ "			FROM tsk_event_descriptions LEFT OUTER JOIN blackboard_artifact_tags ON (blackboard_artifact_tags.artifact_id = tsk_event_descriptions.artifact_id)"
-						+ " ) AS tsk_event_tags ON (tsk_event_tags.event_description_id = tsk_events.event_description_id)")
-						: "")
-				+ (needHashSets ? " LEFT OUTER JOIN ( "
-						+ "		SELECT DISTINCT value_text AS hash_set_name, obj_id  "
-						+ "		FROM blackboard_artifacts"
-						+ "		JOIN blackboard_attributes ON (blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id)"
-						+ "		JOIN blackboard_artifact_types ON( blackboard_artifacts.artifact_type_id = blackboard_artifact_types.artifact_type_id)"
-						+ "		WHERE  blackboard_artifact_types.artifact_type_id = " + TSK_HASHSET_HIT.getTypeID()
-						+ "		AND blackboard_attributes.attribute_type_id = " + TSK_SET_NAME.getTypeID() + ") AS hash_set_hits"
-						+ "	ON ( tsk_event_descriptions.file_obj_id = hash_set_hits.obj_id)"
-						: "")
 				+ (needMimeTypes ? " LEFT OUTER JOIN tsk_files "
-						+ "	ON (tsk_event_descriptions.file_obj_id = tsk_files.obj_id)"
+						+ "	ON (tsk_event_descriptions.content_obj_id = tsk_files.obj_id)"
 						: "")
 				+ ")  AS tsk_events";
 	}
@@ -964,7 +973,7 @@ static private String getAugmentedEventsTablesSQL(boolean needTags, boolean need
 	/**
 	 * Convert a boolean to int with the mappings true => 1, false =>0
 	 *
-	 * @param value the bollean value to convert to an int.
+	 * @param value the boolean value to convert to an int.
 	 *
 	 * @return 1 if value is true, 0 if value is false.
 	 */
@@ -975,61 +984,62 @@ private static int booleanToInt(boolean value) {
 	private static boolean intToBoolean(int value) {
 		return value != 0;
 	}
-	
+
 	/**
-	 * Returns a list of TimelineEvents for the given filter and time range.
-	 * 
-	 * @param timeRange 
-	 * @param filter TimelineFilter.RootFilter for filtering data
-	 * 
-	 * @return	A list of TimelineEvents for given parameters, if filter is null 
-	 *			or times are invalid an empty list will be returned.
-	 * 
-	 * @throws TskCoreException 
+	 * Gets the timeline events that fall within a given time interval and
+	 * satisfy a given event filter.
+	 *
+	 * @param timeRange The time interval.
+	 * @param filter    The event filter.
+	 *
+	 * @return	The list of events that fall within the specified interval and
+	 *         pass the specified filter.
+	 *
+	 * @throws TskCoreException If there is an error querying the case database.
 	 */
-	public List<TimelineEvent> getEvents(Interval timeRange, TimelineFilter.RootFilter filter) throws TskCoreException{
+	public List<TimelineEvent> getEvents(Interval timeRange, TimelineFilter.RootFilter filter) throws TskCoreException {
 		List<TimelineEvent> events = new ArrayList<>();
-		
+
 		Long startTime = timeRange.getStartMillis() / 1000;
 		Long endTime = timeRange.getEndMillis() / 1000;
 
 		if (Objects.equals(startTime, endTime)) {
 			endTime++; //make sure end is at least 1 millisecond after start
 		}
-		
+
 		if (filter == null) {
 			return events;
 		}
-		
+
 		if (endTime < startTime) {
 			return events;
 		}
 
 		//build dynamic parts of query
-        String querySql = "SELECT time, file_obj_id, data_source_obj_id, artifact_id, " // NON-NLS
-                          + "  event_id, " //NON-NLS
-                          + " hash_hit, " //NON-NLS
-                          + " tagged, " //NON-NLS
-                          + " event_type_id, super_type_id, "
-                          + " full_description, med_description, short_description " // NON-NLS
-                          + " FROM " + getAugmentedEventsTablesSQL(filter) // NON-NLS
-                          + " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + getSQLWhere(filter) // NON-NLS
-                          + " ORDER BY time"; // NON-NLS
-		
-		sleuthkitCase.acquireSingleUserCaseReadLock();
-		try (CaseDbConnection con = sleuthkitCase.getConnection();
+		String querySql = "SELECT time, content_obj_id, data_source_obj_id, artifact_id, " // NON-NLS
+				+ "  event_id, " //NON-NLS
+				+ " hash_hit, " //NON-NLS
+				+ " tagged, " //NON-NLS
+				+ " event_type_id, super_type_id, "
+				+ " full_description, med_description, short_description " // NON-NLS
+				+ " FROM " + getAugmentedEventsTablesSQL(filter) // NON-NLS
+				+ " WHERE time >= " + startTime + " AND time < " + endTime + " AND " + getSQLWhere(filter) // NON-NLS
+				+ " ORDER BY time"; // NON-NLS
+
+		caseDB.acquireSingleUserCaseReadLock();
+		try (CaseDbConnection con = caseDB.getConnection();
 				Statement stmt = con.createStatement();
 				ResultSet resultSet = stmt.executeQuery(querySql);) {
-			
-			 while (resultSet.next()) {
-                int eventTypeID = resultSet.getInt("event_type_id");
+
+			while (resultSet.next()) {
+				int eventTypeID = resultSet.getInt("event_type_id");
 				TimelineEventType eventType = getEventType(eventTypeID).orElseThrow(()
 						-> new TskCoreException("Error mapping event type id " + eventTypeID + "to EventType."));//NON-NLS
 
-				TimelineEvent event =  new TimelineEvent(
+				TimelineEvent event = new TimelineEvent(
 						resultSet.getLong("event_id"), // NON-NLS
 						resultSet.getLong("data_source_obj_id"), // NON-NLS
-						resultSet.getLong("file_obj_id"), // NON-NLS
+						resultSet.getLong("content_obj_id"), // NON-NLS
 						resultSet.getLong("artifact_id"), // NON-NLS
 						resultSet.getLong("time"), // NON-NLS
 						eventType,
@@ -1038,16 +1048,16 @@ public List<TimelineEvent> getEvents(Interval timeRange, TimelineFilter.RootFilt
 						resultSet.getString("short_description"), // NON-NLS
 						resultSet.getInt("hash_hit") != 0, //NON-NLS
 						resultSet.getInt("tagged") != 0);
-				
+
 				events.add(event);
-            }
-			
+			}
+
 		} catch (SQLException ex) {
 			throw new TskCoreException("Error getting events from db: " + querySql, ex); // NON-NLS
 		} finally {
-			sleuthkitCase.releaseSingleUserCaseReadLock();
+			caseDB.releaseSingleUserCaseReadLock();
 		}
-		
+
 		return events;
 	}
 
@@ -1058,7 +1068,7 @@ public List<TimelineEvent> getEvents(Interval timeRange, TimelineFilter.RootFilt
 	 *
 	 * @return column name to use depending on if we want base types or subtypes
 	 */
-	static String typeColumnHelper(final boolean useSubTypes) {
+	private static String typeColumnHelper(final boolean useSubTypes) {
 		return useSubTypes ? "event_type_id" : "super_type_id"; //NON-NLS
 	}
 
@@ -1082,26 +1092,14 @@ String getSQLWhere(TimelineFilter.RootFilter filter) {
 		return result;
 	}
 
-	String getDescriptionColumn(TimelineEvent.DescriptionLevel lod) {
-		switch (lod) {
-			case FULL:
-				return "full_description"; //NON-NLS
-			case MEDIUM:
-				return "med_description"; //NON-NLS
-			case SHORT:
-			default:
-				return "short_description"; //NON-NLS
-			}
-	}
-
-	String getTrueLiteral() {
-		switch (sleuthkitCase.getDatabaseType()) {
+	private String getTrueLiteral() {
+		switch (caseDB.getDatabaseType()) {
 			case POSTGRESQL:
 				return "TRUE";//NON-NLS
 			case SQLITE:
 				return "1";//NON-NLS
 			default:
-				throw new UnsupportedOperationException("Unsupported DB type: " + sleuthkitCase.getDatabaseType().name());//NON-NLS
+				throw new UnsupportedOperationException("Unsupported DB type: " + caseDB.getDatabaseType().name());//NON-NLS
 
 		}
 	}
@@ -1131,7 +1129,7 @@ public TimelineEvent getAddedEvent() {
 	 * @param <O> Output type.
 	 */
 	@FunctionalInterface
-	interface TSKCoreCheckedFunction<I, O> {
+	private interface TSKCoreCheckedFunction<I, O> {
 
 		O apply(I input) throws TskCoreException;
 	}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactHelperBase.java b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactHelperBase.java
new file mode 100644
index 0000000000000000000000000000000000000000..08e287e080a4dd6ea115f3867bec03f02bef81ff
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactHelperBase.java
@@ -0,0 +1,105 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel.blackboardutils;
+
+import java.util.Collection;
+import org.apache.commons.lang3.StringUtils;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.SleuthkitCase;
+
+/**
+ * A base class for classes that help ingest modules create artifacts.
+ *
+ */
+class ArtifactHelperBase {
+
+	private final SleuthkitCase caseDb;
+	private final AbstractFile srcAbstractFile;	// artifact source
+	private final String moduleName;			// module creating the artifacts
+
+	/**
+	 * Creates an artifact helper.
+	 *
+	 * @param caseDb     Sleuthkit case db
+	 * @param moduleName Name of the module using the helper
+	 * @param srcFile    source file
+	 */
+	ArtifactHelperBase(SleuthkitCase caseDb, String moduleName, AbstractFile srcFile) {
+		this.moduleName = moduleName;
+		this.srcAbstractFile = srcFile;
+		this.caseDb = caseDb;
+	}
+
+	/**
+	 * Returns the source abstract file.
+	 *
+	 * @return Source abstract file.
+	 */
+	AbstractFile getAbstractFile() {
+		return this.srcAbstractFile;
+	}
+
+	/**
+	 * Returns the sleuthkit case.
+	 *
+	 * @return Sleuthkit case database.
+	 */
+	SleuthkitCase getSleuthkitCase() {
+		return this.caseDb;
+	}
+
+	/**
+	 * Returns module name.
+	 *
+	 * @return Module name.
+	 */
+	String getModuleName() {
+		return this.moduleName;
+	}
+
+	/**
+	 * Creates and adds an attribute of specified type to the given list, if the
+	 * attribute value is not empty or null.
+	 *
+	 * @param attributeType Attribute type.
+	 * @param attrValue     Attribute value.
+	 * @param attributes    List of attributes to add to.
+	 *
+	 */
+	void addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE attributeType, String attrValue, Collection<BlackboardAttribute> attributes) {
+		if (!StringUtils.isEmpty(attrValue)) {
+			attributes.add(new BlackboardAttribute(attributeType, getModuleName(), attrValue));
+		}
+	}
+
+	/**
+	 * Creates and adds an attribute of specified type to the given list, if the
+	 * attribute value is not 0.
+	 *
+	 * @param attributeType Attribute type.
+	 * @param attrValue     Attribute value.
+	 * @param attributes    List of attributes to add to.
+	 */
+	void addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE attributeType, long attrValue, Collection<BlackboardAttribute> attributes) {
+		if (attrValue > 0) {
+			attributes.add(new BlackboardAttribute(attributeType, getModuleName(), attrValue));
+		}
+	}
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactsHelper.java b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactsHelper.java
new file mode 100644
index 0000000000000000000000000000000000000000..a8878b295a31c207475ed561107023202beee91b
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/ArtifactsHelper.java
@@ -0,0 +1,174 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel.blackboardutils;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard.BlackboardException;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * This class helps ingest modules create miscellaneous artifacts.
+ *
+ */
+public final class ArtifactsHelper extends ArtifactHelperBase {
+
+	/**
+	 * Creates an artifact helper for modules to create artifacts.
+	 *
+	 * @param caseDb     Sleuthkit case database.
+	 * @param moduleName Name of module using the helper.
+	 * @param srcFile    Source file for the artifacts.
+	 *
+	 */
+	public ArtifactsHelper(SleuthkitCase caseDb, String moduleName, AbstractFile srcFile) {
+		super(caseDb, moduleName, srcFile);
+	}
+
+	/**
+	 * Adds a TSK_GPS_TRACKPOINT artifact.
+	 *
+	 * @param latitude    Location latitude, required.
+	 * @param longitude   Location longitude, required.
+	 * @param timeStamp   Date/time trackpoint was recorded, can be 0 if not
+	 *                    available.
+	 * @param name        Trackpoint name, can be empty/null if not available.
+	 * @param programName Name of program that recorded the trackpoint, can be
+	 *                    empty or null if not available.
+	 *
+	 * @return GPS trackpoint artifact added
+	 *
+	 * @throws TskCoreException		If there is an error creating the artifact.
+	 * @throws BlackboardException	If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addGPSLocation(double latitude, double longitude,
+			long timeStamp, String name, String programName) throws TskCoreException, BlackboardException {
+
+		return addGPSLocation(latitude, longitude, timeStamp, name, programName,
+				Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_GPS_TRACKPOINT artifact.
+	 *
+	 * @param latitude            Location latitude, required.
+	 * @param longitude           Location longitude, required.
+	 * @param timeStamp           Date/time the trackpoint was recorded, can be
+	 *                            0 if not available.
+	 * @param name                Trackpoint name, can be empty/null if not
+	 *                            available.
+	 * @param programName         Name of program that recorded the trackpoint,
+	 *                            can be empty or null if not available.
+	 * @param otherAttributesList Other attributes, can be an empty list if no
+	 *                            additional attributes.
+	 *
+	 * @return GPS trackpoint artifact added
+	 *
+	 * @throws TskCoreException		If there is an error creating the artifact.
+	 * @throws BlackboardException	If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addGPSLocation(double latitude, double longitude, long timeStamp, String name, String programName,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		BlackboardArtifact gpsTrackpointArtifact;
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
+
+		// create artifact
+		gpsTrackpointArtifact = getAbstractFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_GPS_TRACKPOINT);
+
+		// construct attributes 
+		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LATITUDE, getModuleName(), latitude));
+		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_GEO_LONGITUDE, getModuleName(), longitude));
+
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, timeStamp, attributes);
+
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, name, attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, programName, attributes);
+
+		// add the attributes 
+		attributes.addAll(otherAttributesList);
+		gpsTrackpointArtifact.addAttributes(attributes);
+
+		// post artifact 
+		getSleuthkitCase().getBlackboard().postArtifact(gpsTrackpointArtifact, getModuleName());
+
+		// return the artifact
+		return gpsTrackpointArtifact;
+	}
+
+	/**
+	 * Adds a TSK_INSTALLED_PROGRAM artifact.
+	 *
+	 * @param programName   Name of program, required.
+	 * @param dateInstalled Date/time of install, can be 0 if not available.
+	 *
+	 * @return Installed program artifact added.
+	 *
+	 * @throws TskCoreException		If there is an error creating the artifact.
+	 * @throws BlackboardException	If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addInstalledProgram(String programName, long dateInstalled) throws TskCoreException, BlackboardException {
+		return addInstalledProgram(programName, dateInstalled,
+				Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_INSTALLED_PROGRAM artifact.
+	 *
+	 * @param programName         Name of program, required.
+	 * @param dateInstalled       Date/time of install, can be 0 if not
+	 *                            available.
+	 * @param otherAttributesList Additional attributes, can be an empty list if
+	 *                            no additional attributes.
+	 *
+	 * @return Installed program artifact added.
+	 *
+	 * @throws TskCoreException		If there is an error creating the artifact.
+	 * @throws BlackboardException	If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addInstalledProgram(String programName, long dateInstalled,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		BlackboardArtifact installedProgramArtifact;
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
+
+		// create artifact
+		installedProgramArtifact = getAbstractFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_INSTALLED_PROG);
+
+		// construct attributes 
+		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, getModuleName(), programName));
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, dateInstalled, attributes);
+
+		// add the attributes 
+		attributes.addAll(otherAttributesList);
+		installedProgramArtifact.addAttributes(attributes);
+
+		// post artifact 
+		getSleuthkitCase().getBlackboard().postArtifact(installedProgramArtifact, getModuleName());
+
+		// return the artifact
+		return installedProgramArtifact;
+	}
+
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/CommunicationArtifactsHelper.java b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/CommunicationArtifactsHelper.java
new file mode 100644
index 0000000000000000000000000000000000000000..267082450786a4184ba391162bb7f79a0061a867
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/CommunicationArtifactsHelper.java
@@ -0,0 +1,815 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel.blackboardutils;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.List;
+import org.apache.commons.lang3.StringUtils;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
+import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
+import org.sleuthkit.datamodel.Account;
+import org.sleuthkit.datamodel.AccountFileInstance;
+import org.sleuthkit.datamodel.Blackboard.BlackboardException;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.DataSource;
+import org.sleuthkit.datamodel.Relationship;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+import org.sleuthkit.datamodel.TskDataException;
+
+/**
+ * Class to help ingest modules create communication artifacts. Communication
+ * artifacts includes contacts, messages, call logs.
+ *
+ * It creates a 'self' account {@link Account} - an account for the owner/user
+ * of the application being processed by the module. As an example, for a module
+ * analyzing Facebook application, this would be account associated with the
+ * unique Facebook user id of the device owner.
+ *
+ * In the absence of a 'self' account, a 'device' account may be used in its
+ * place. A 'device' account is an account meant to represent the owner of the
+ * device and uses the unique device id as the unique account identifier.
+ *
+ * It also creates accounts for contacts, and sender/receivers of the messages,
+ * and calls.
+ *
+ * And it also creates relationships between the self account - and the contacts
+ * and sender/receiver accounts.
+ *
+ */
+public final class CommunicationArtifactsHelper extends ArtifactHelperBase {
+
+	/**
+	 * Enum for message read status
+	 */
+	public enum MessageReadStatus {
+
+		UNKNOWN("Unknown"), /// read status is unknown
+		UNREAD("Unread"), /// message has not been read
+		READ("Read");     /// message has been read
+
+		private final String msgReadStr;
+
+		MessageReadStatus(String readStatus) {
+			this.msgReadStr = readStatus;
+		}
+
+		public String getDisplayName() {
+			return msgReadStr;
+		}
+	}
+
+	/**
+	 * Enum for call/message direction.
+	 */
+	public enum CommunicationDirection {
+		UNKNOWN("Unknown"),
+		INCOMING("Incoming"),
+		OUTGOING("Outgoing");
+
+		private final String dirStr;
+
+		CommunicationDirection(String dir) {
+			this.dirStr = dir;
+		}
+
+		public String getDisplayName() {
+			return dirStr;
+		}
+	}
+
+	/**
+	 * Enum for call media type.
+	 */
+	public enum CallMediaType {
+		UNKNOWN("Unknown"),
+		AUDIO("Audio"), // Audio only call
+		VIDEO("Video");	// Video/multimedia call
+
+		private final String typeStr;
+
+		CallMediaType(String type) {
+			this.typeStr = type;
+		}
+
+		public String getDisplayName() {
+			return typeStr;
+		}
+	}
+
+	// 'self' account for the application being processed by the module. 
+	private final AccountFileInstance selfAccountInstance;
+
+	// Type of accounts to be created for the module using this helper.
+	private final Account.Type accountsType;
+
+	/**
+	 * Constructs a communications artifacts helper for the given source file.
+	 *
+	 * This is a constructor for modules that do not have a 'self' account, and
+	 * will use a 'Device' account in its place.
+	 *
+	 * It creates a DeviceAccount instance to use as a self account.
+	 *
+	 * @param caseDb       Sleuthkit case db.
+	 * @param moduleName   Name of module using the helper.
+	 * @param srcFile      Source file being processed by the module.
+	 * @param accountsType Account type {@link Account.Type} created by this
+	 *                     module.
+	 *
+	 * @throws TskCoreException If there is an error creating the device
+	 *                          account.
+	 */
+	public CommunicationArtifactsHelper(SleuthkitCase caseDb,
+			String moduleName, AbstractFile srcFile, Account.Type accountsType) throws TskCoreException {
+
+		super(caseDb, moduleName, srcFile);
+
+		this.accountsType = accountsType;
+		this.selfAccountInstance = getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(Account.Type.DEVICE, ((DataSource) getAbstractFile().getDataSource()).getDeviceId(), moduleName, getAbstractFile());
+	}
+
+	/**
+	 * Constructs an AppDB parser helper for the given DB file.
+	 *
+	 * This constructor is for modules that have the application specific
+	 * account information for the device owner to create a 'self' account.
+	 *
+	 * It creates an account instance with specified type & id, and uses it as
+	 * the self account.
+	 *
+	 * @param caseDb          Sleuthkit case db.
+	 * @param moduleName      Name of module using the helper.
+	 * @param srcFile         Source file being processed by the module.
+	 * @param accountsType    Account type {@link Account.Type} created by this
+	 *                        module.
+	 * @param selfAccountType Self account type to be created for this module.
+	 * @param selfAccountId	  Account unique id for the self account.
+	 *
+	 * @throws TskCoreException	If there is an error creating the self account
+	 */
+	public CommunicationArtifactsHelper(SleuthkitCase caseDb, String moduleName, AbstractFile srcFile, Account.Type accountsType, Account.Type selfAccountType, String selfAccountId) throws TskCoreException {
+
+		super(caseDb, moduleName, srcFile);
+
+		this.accountsType = accountsType;
+		this.selfAccountInstance = getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(selfAccountType, selfAccountId, moduleName, getAbstractFile());
+	}
+
+	/**
+	 * Creates and adds a TSK_CONTACT artifact to the case, with specified
+	 * attributes. Also creates an account instance of specified type for the
+	 * contact with the specified ID.
+	 *
+	 * @param contactName       Contact name, required.
+	 * @param phoneNumber       Primary phone number for contact, may be empty
+	 *                          or null.
+	 * @param homePhoneNumber   Home phone number, may be empty or null.
+	 * @param mobilePhoneNumber Mobile phone number, may be empty or null.
+	 * @param emailAddr         Email address for the contact, may be empty or
+	 *                          null.
+	 *
+	 * At least one phone number or email address is required.
+	 *
+	 * @return Contact artifact created.
+	 *
+	 * @throws TskCoreException		  If there is an error creating the artifact.
+	 * @throws BlackboardException	If there is a problem posting the artifact.
+	 *
+	 */
+	public BlackboardArtifact addContact(String contactName,
+			String phoneNumber, String homePhoneNumber,
+			String mobilePhoneNumber, String emailAddr) throws TskCoreException, BlackboardException {
+		return addContact(contactName, phoneNumber,
+				homePhoneNumber, mobilePhoneNumber, emailAddr,
+				Collections.emptyList());
+	}
+
+	/**
+	 * Creates and adds a TSK_CONTACT artifact to the case, with specified
+	 * attributes. Also creates an account instance for the contact with the
+	 * specified ID.
+	 *
+	 * @param contactName          Contact name, required
+	 * @param phoneNumber          Primary phone number for contact, may be
+	 *                             empty or null.
+	 * @param homePhoneNumber      Home phone number, may be empty or null.
+	 * @param mobilePhoneNumber    Mobile phone number, may be empty or null.
+	 * @param emailAddr            Email address for the contact, may be empty
+	 *                             or null.
+	 *
+	 * At least one phone number or email address or an Id is required.
+	 * An Id may be passed in as a TSK_ID attribute in additionalAttributes.
+	 *
+	 * @param additionalAttributes Additional attributes for contact, may be an
+	 *                             empty list.
+	 *
+	 * @return contact artifact created.
+	 *
+	 * @throws TskCoreException		  If there is an error creating the artifact.
+	 * @throws BlackboardException	If there is a problem posting the artifact.
+	 *
+	 */
+	public BlackboardArtifact addContact(String contactName,
+			String phoneNumber, String homePhoneNumber,
+			String mobilePhoneNumber, String emailAddr,
+			Collection<BlackboardAttribute> additionalAttributes) throws TskCoreException, BlackboardException {
+
+		// Contact name must be provided
+		if (StringUtils.isEmpty(contactName)) {
+			throw new IllegalArgumentException("Contact name must be specified.");
+		}
+
+		// check if the caller has included any phone/email/id in additional attributes
+		boolean hasAnyIdAttribute = false;
+		if (additionalAttributes != null) {
+			for (BlackboardAttribute attr : additionalAttributes) {
+				if ((attr.getAttributeType().getTypeName().startsWith("TSK_PHONE")) ||
+					(attr.getAttributeType().getTypeName().startsWith("TSK_EMAIL"))	||
+					(attr.getAttributeType().getTypeName().startsWith("TSK_ID")))  {
+						hasAnyIdAttribute = true;
+						break;
+				}
+			}
+		}
+
+		// At least one phone number or email address 
+		// or an optional attribute with phone/email/id must be provided
+		if (StringUtils.isEmpty(phoneNumber) && StringUtils.isEmpty(homePhoneNumber)
+				&& StringUtils.isEmpty(mobilePhoneNumber) && StringUtils.isEmpty(emailAddr)
+				&& (!hasAnyIdAttribute)) {
+			throw new IllegalArgumentException("At least one phone number or email address or an id must be provided.");
+		}
+
+		BlackboardArtifact contactArtifact;
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
+
+		// create TSK_CONTACT artifact
+		contactArtifact = getAbstractFile().newArtifact(ARTIFACT_TYPE.TSK_CONTACT);
+
+		// construct attributes
+		attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), contactName));
+
+		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, phoneNumber, attributes);
+		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_HOME, homePhoneNumber, attributes);
+		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_MOBILE, mobilePhoneNumber, attributes);
+		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_EMAIL, emailAddr, attributes);
+
+		// add attributes
+		attributes.addAll(additionalAttributes);
+		contactArtifact.addAttributes(attributes);
+
+		// create an account for each specified contact method, and a relationship with self account
+		createContactMethodAccountAndRelationship(Account.Type.PHONE, phoneNumber, contactArtifact, 0);
+		createContactMethodAccountAndRelationship(Account.Type.PHONE, homePhoneNumber, contactArtifact, 0);
+		createContactMethodAccountAndRelationship(Account.Type.PHONE, mobilePhoneNumber, contactArtifact, 0);
+		createContactMethodAccountAndRelationship(Account.Type.EMAIL, emailAddr, contactArtifact, 0);
+
+		// if the additional attribute list has any phone/email/id attributes, create accounts & relationships for those. 
+		if ((additionalAttributes != null) && hasAnyIdAttribute) {
+			for (BlackboardAttribute bba : additionalAttributes) {
+                if (bba.getAttributeType().getTypeName().startsWith("TSK_PHONE")) {
+					createContactMethodAccountAndRelationship(Account.Type.PHONE, bba.getValueString(), contactArtifact, 0);
+                } else if (bba.getAttributeType().getTypeName().startsWith("TSK_EMAIL")) {
+                    createContactMethodAccountAndRelationship(Account.Type.EMAIL, bba.getValueString(), contactArtifact, 0);
+                } else if (bba.getAttributeType().getTypeName().startsWith("TSK_ID")) {
+                    createContactMethodAccountAndRelationship(this.accountsType, bba.getValueString(), contactArtifact, 0);
+                } 
+            }
+		}
+		
+		// post artifact 
+		getSleuthkitCase().getBlackboard().postArtifact(contactArtifact, getModuleName());
+
+		return contactArtifact;
+	}
+
+	/**
+	 * Creates a contact's account instance of specified account type, if the
+	 * account id is not null/empty.
+	 *
+	 * Also creates a CONTACT relationship between the self account and the new
+	 * contact account.
+	 */
+	private void createContactMethodAccountAndRelationship(Account.Type accountType,
+			String accountUniqueID, BlackboardArtifact sourceArtifact,
+			long dateTime) throws TskCoreException {
+
+		// Find/Create an account instance for each of the contact method
+		// Create a relationship between selfAccount and contactAccount
+		if (!StringUtils.isEmpty(accountUniqueID)) {
+			AccountFileInstance contactAccountInstance = createAccountInstance(accountType, accountUniqueID);
+
+			// Create a relationship between self account and the contact account
+			try {
+				getSleuthkitCase().getCommunicationsManager().addRelationships(selfAccountInstance,
+						Collections.singletonList(contactAccountInstance), sourceArtifact, Relationship.Type.CONTACT, dateTime);
+			} catch (TskDataException ex) {
+				throw new TskCoreException(String.format("Failed to create relationship between account = %s and account = %s.",
+						selfAccountInstance.getAccount(), contactAccountInstance.getAccount()), ex);
+			}
+		}
+	}
+
+	/**
+	 * Creates an account file instance {@link AccountFileInstance} associated
+	 * with the DB file.
+	 *
+	 * @param accountType     Type of account to create.
+	 * @param accountUniqueID Unique id for the account.
+	 *
+	 * @return Account instance created.
+	 *
+	 * @throws TskCoreException If there is an error creating the account
+	 *                          instance.
+	 */
+	private AccountFileInstance createAccountInstance(Account.Type accountType, String accountUniqueID) throws TskCoreException {
+		// Delegates to the communications manager; module name and source file
+		// come from the helper base class.
+		return getSleuthkitCase().getCommunicationsManager().createAccountFileInstance(accountType, accountUniqueID, getModuleName(), getAbstractFile());
+	}
+
+	/**
+	 * Adds a TSK_MESSAGE artifact with no additional attributes.
+	 *
+	 * An account instance is created for the sender/receiver, and a
+	 * relationship is created between the self account and that account.
+	 *
+	 * @param messageType Message type, required.
+	 * @param direction   Message direction, UNKNOWN if not available.
+	 * @param senderId    Sender address id, may be null.
+	 * @param recipientId Recipient id, may be null.
+	 * @param dateTime    Date/time of message, 0 if not available.
+	 * @param readStatus  Message read status, UNKNOWN if not available.
+	 * @param subject     Message subject, may be empty or null.
+	 * @param messageText Message body, may be empty or null.
+	 * @param threadId    Message thread id, may be empty or null.
+	 *
+	 * @return Message artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addMessage(
+			String messageType,
+			CommunicationDirection direction,
+			String senderId,
+			String recipientId,
+			long dateTime, MessageReadStatus readStatus,
+			String subject, String messageText, String threadId) throws TskCoreException, BlackboardException {
+		// Convenience overload: forward with an empty extra-attributes list.
+		return addMessage(messageType, direction, senderId, recipientId,
+				dateTime, readStatus, subject, messageText, threadId, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_MESSAGE artifact with additional attributes and a single
+	 * recipient.
+	 *
+	 * An account instance is created for the sender/receiver, and a
+	 * relationship is created between the self account and that account.
+	 *
+	 * @param messageType         Message type, required.
+	 * @param direction           Message direction, UNKNOWN if not available.
+	 * @param senderId            Sender id, may be null.
+	 * @param recipientId         Recipient id, may be null.
+	 * @param dateTime            Date/time of message, 0 if not available.
+	 * @param readStatus          Message read status, UNKNOWN if not available.
+	 * @param subject             Message subject, may be empty or null.
+	 * @param messageText         Message body, may be empty or null.
+	 * @param threadId            Message thread id, may be empty or null.
+	 * @param otherAttributesList Additional attributes, may be an empty list.
+	 *
+	 * @return Message artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addMessage(String messageType,
+			CommunicationDirection direction,
+			String senderId,
+			String recipientId,
+			long dateTime, MessageReadStatus readStatus, String subject,
+			String messageText, String threadId,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+		// Wrap the single recipient in a list and forward to the list overload.
+		return addMessage(messageType, direction, senderId, Arrays.asList(recipientId),
+				dateTime, readStatus, subject, messageText, threadId, otherAttributesList);
+	}
+
+	/**
+	 * Adds a TSK_MESSAGE artifact with a list of recipients and no additional
+	 * attributes.
+	 *
+	 * An account instance is created for the sender and each receiver, and a
+	 * relationship is created between the self account and each of them.
+	 *
+	 * @param messageType      Message type, required.
+	 * @param direction        Message direction, UNKNOWN if not available.
+	 * @param senderId         Sender id, may be null.
+	 * @param recipientIdsList Recipient ids list, may be null or empty list.
+	 * @param dateTime         Date/time of message, 0 if not available.
+	 * @param readStatus       Message read status, UNKNOWN if not available.
+	 * @param subject          Message subject, may be empty or null.
+	 * @param messageText      Message body, may be empty or null.
+	 * @param threadId         Message thread id, may be empty or null.
+	 *
+	 * @return Message artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addMessage(String messageType,
+			CommunicationDirection direction,
+			String senderId,
+			List<String> recipientIdsList,
+			long dateTime, MessageReadStatus readStatus,
+			String subject, String messageText, String threadId) throws TskCoreException, BlackboardException {
+		// Convenience overload: forward with an empty extra-attributes list.
+		return addMessage(messageType, direction, senderId, recipientIdsList,
+				dateTime, readStatus, subject, messageText, threadId, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_MESSAGE artifact.
+	 *
+	 * Also creates accounts for the sender/receivers, and creates relationships
+	 * between the sender/receivers account.
+	 *
+	 * @param messageType         Message type, required.
+	 * @param direction           Message direction, UNKNOWN if not available.
+	 * @param senderId            Sender id, may be null.
+	 * @param recipientIdsList    Recipient list, may be null or an empty list.
+	 * @param dateTime            Date/time of message, 0 if not available.
+	 * @param readStatus          Message read status, UNKNOWN if not available.
+	 * @param subject             Message subject, may be empty or null.
+	 * @param messageText         Message body, may be empty or null.
+	 * @param threadId            Message thread id, may be empty or null.
+	 * @param otherAttributesList Other attributes, may be an empty list.
+	 *
+	 * @return Message artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addMessage(String messageType,
+			CommunicationDirection direction,
+			String senderId,
+			List<String> recipientIdsList,
+			long dateTime, MessageReadStatus readStatus,
+			String subject, String messageText,
+			String threadId,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		// create TSK_MESSAGE artifact
+		BlackboardArtifact msgArtifact = getAbstractFile().newArtifact(ARTIFACT_TYPE.TSK_MESSAGE);
+
+		// construct attributes
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
+		attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_MESSAGE_TYPE, getModuleName(), messageType));
+		addAttributeIfNotZero(ATTRIBUTE_TYPE.TSK_DATETIME, dateTime, attributes);
+
+		addMessageReadStatusIfKnown(readStatus, attributes);
+		addCommDirectionIfKnown(direction, attributes);
+
+		// set sender attribute and create sender account; an empty sender id
+		// means the message was sent from the self (device owner) account.
+		AccountFileInstance senderAccountInstance;
+		if (StringUtils.isEmpty(senderId)) {
+			senderAccountInstance = selfAccountInstance;
+			addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, selfAccountInstance.getAccount().getTypeSpecificID(), attributes);
+		} else {
+			senderAccountInstance = createAccountInstance(accountsType, senderId);
+			addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, senderId, attributes);
+		}
+
+		// set recipient attribute and create recipient accounts
+		// (fixed: was a raw ArrayList; use the diamond for type safety)
+		List<AccountFileInstance> recipientAccountsList = new ArrayList<>();
+		String recipientsStr = "";
+		if (recipientIdsList != null) {
+			for (String recipient : recipientIdsList) {
+				if (!StringUtils.isEmpty(recipient)) {
+					recipientAccountsList.add(createAccountInstance(accountsType, recipient));
+				}
+			}
+			// Create a comma separated string of recipients
+			recipientsStr = addressListToString(recipientIdsList);
+			addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, recipientsStr, attributes);
+		}
+
+		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_SUBJECT, subject, attributes);
+		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_TEXT, messageText, attributes);
+		addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_THREAD_ID, threadId, attributes);
+
+		// add attributes to artifact
+		attributes.addAll(otherAttributesList);
+		msgArtifact.addAttributes(attributes);
+
+		// create sender/recipient relationships
+		try {
+			getSleuthkitCase().getCommunicationsManager().addRelationships(senderAccountInstance,
+					recipientAccountsList, msgArtifact, Relationship.Type.MESSAGE, dateTime);
+		} catch (TskDataException ex) {
+			throw new TskCoreException(String.format("Failed to create Message relationships between sender account = %s and recipients = %s.",
+					senderAccountInstance.getAccount().getTypeSpecificID(), recipientsStr), ex);
+		}
+
+		// post artifact
+		getSleuthkitCase().getBlackboard().postArtifact(msgArtifact, getModuleName());
+
+		// return the artifact
+		return msgArtifact;
+	}
+
+	/**
+	 * Adds a TSK_CALLLOG artifact with a single callee and no additional
+	 * attributes.
+	 *
+	 * An account instance is created for the caller/callee, and relationships
+	 * are created between the self account and the caller account as well as
+	 * between the self account and the callee account.
+	 *
+	 * @param direction     Call direction, UNKNOWN if not available.
+	 * @param callerId      Caller id, may be null.
+	 * @param calleeId      Callee id, may be null.
+	 *
+	 * At least one of the two must be provided - the caller Id, or a callee id.
+	 *
+	 * @param startDateTime Start date/time, 0 if not available.
+	 * @param endDateTime   End date/time, 0 if not available.
+	 * @param mediaType     Media type.
+	 *
+	 * @return Call log artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addCalllog(CommunicationDirection direction,
+			String callerId, String calleeId,
+			long startDateTime, long endDateTime, CallMediaType mediaType) throws TskCoreException, BlackboardException {
+		// Convenience overload: forward with an empty extra-attributes list.
+		return addCalllog(direction, callerId, calleeId,
+				startDateTime, endDateTime, mediaType, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_CALLLOG artifact with a single callee and additional
+	 * attributes.
+	 *
+	 * An account instance is created for the caller/callee, and relationships
+	 * are created between the self account and the caller account as well as
+	 * between the self account and the callee account.
+	 *
+	 * @param direction           Call direction, UNKNOWN if not available.
+	 * @param callerId            Caller id, may be null.
+	 * @param calleeId            Callee id, may be null.
+	 *
+	 * At least one of the two must be provided - the caller Id, or a callee id.
+	 *
+	 * @param startDateTime       Start date/time, 0 if not available.
+	 * @param endDateTime         End date/time, 0 if not available.
+	 * @param mediaType           Media type.
+	 * @param otherAttributesList Other attributes.
+	 *
+	 * @return Call log artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addCalllog(CommunicationDirection direction,
+			String callerId,
+			String calleeId,
+			long startDateTime, long endDateTime,
+			CallMediaType mediaType,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+		// Wrap the single callee in a list and forward to the list overload.
+		return addCalllog(direction, callerId, Arrays.asList(calleeId),
+				startDateTime, endDateTime, mediaType, otherAttributesList);
+	}
+
+	/**
+	 * Adds a TSK_CALLLOG artifact with a list of callees and no additional
+	 * attributes.
+	 *
+	 * An account instance is created for the caller and each callee, and a
+	 * relationship is created between the self account and the caller account
+	 * as well as between the self account and each callee account.
+	 *
+	 * @param direction     Call direction, UNKNOWN if not available.
+	 * @param callerId      Caller id, may be null.
+	 * @param calleeIdsList Callee list, may be an empty list.
+	 *
+	 * At least one of the two must be provided - the caller Id, or a callee id.
+	 *
+	 * @param startDateTime Start date/time, 0 if not available.
+	 * @param endDateTime   End date/time, 0 if not available.
+	 * @param mediaType     Call media type, UNKNOWN if not available.
+	 *
+	 * @return Call log artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addCalllog(CommunicationDirection direction,
+			String callerId,
+			Collection<String> calleeIdsList,
+			long startDateTime, long endDateTime,
+			CallMediaType mediaType) throws TskCoreException, BlackboardException {
+		// Convenience overload: forward with an empty extra-attributes list.
+		return addCalllog(direction, callerId, calleeIdsList,
+				startDateTime, endDateTime, mediaType, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_CALLLOG artifact.
+	 *
+	 * Also creates an account instance for the caller and each of the callees,
+	 * and creates relationships between caller and callees.
+	 *
+	 * @param direction           Call direction, UNKNOWN if not available.
+	 * @param callerId            Caller id, required for incoming call.
+	 * @param calleeIdsList       Callee ids list, required for an outgoing
+	 *                            call.
+	 *
+	 * At least one of the two must be provided - the caller Id, or a callee id.
+	 *
+	 * @param startDateTime       Start date/time, 0 if not available.
+	 * @param endDateTime         End date/time, 0 if not available.
+	 * @param mediaType           Call media type, UNKNOWN if not available.
+	 * @param otherAttributesList Other attributes, can be an empty list.
+	 *
+	 * @return Call log artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addCalllog(CommunicationDirection direction,
+			String callerId,
+			Collection<String> calleeIdsList,
+			long startDateTime, long endDateTime,
+			CallMediaType mediaType,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		// Either caller id or a callee id must be provided.
+		if (StringUtils.isEmpty(callerId) && (isEffectivelyEmpty(calleeIdsList))) {
+			throw new IllegalArgumentException("Either a caller id, or at least one callee id must be provided for a call log.");
+		}
+
+		// Create TSK_CALLLOG artifact
+		BlackboardArtifact callLogArtifact = getAbstractFile().newArtifact(ARTIFACT_TYPE.TSK_CALLLOG);
+
+		// Add basic attributes
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
+		addAttributeIfNotZero(ATTRIBUTE_TYPE.TSK_DATETIME_START, startDateTime, attributes);
+		addAttributeIfNotZero(ATTRIBUTE_TYPE.TSK_DATETIME_END, endDateTime, attributes);
+		addCommDirectionIfKnown(direction, attributes);
+
+		// set FROM attribute and create a caller account
+		AccountFileInstance callerAccountInstance;
+		if (StringUtils.isEmpty(callerId)) {
+			// for an Outgoing call, if no caller is specified, assume self account is the caller
+			if (direction == CommunicationDirection.OUTGOING) {
+				callerAccountInstance = selfAccountInstance;
+				addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, selfAccountInstance.getAccount().getTypeSpecificID(), attributes);
+			} else { // incoming call without a caller id
+				throw new IllegalArgumentException("Caller Id not provided for incoming call.");
+			}
+		} else {
+			callerAccountInstance = createAccountInstance(accountsType, callerId);
+			addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_FROM, callerId, attributes);
+		}
+
+		// set TO attribute and create callee accounts
+		// (fixed: was a raw ArrayList; use the diamond for type safety)
+		List<AccountFileInstance> recipientAccountsList = new ArrayList<>();
+		String calleesStr = "";
+		if (! isEffectivelyEmpty(calleeIdsList)) {
+			// Create a comma separated string of callees
+			calleesStr = addressListToString(calleeIdsList);
+			addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, calleesStr, attributes);
+
+			for (String callee : calleeIdsList) {
+				if (!StringUtils.isEmpty(callee)) {
+					recipientAccountsList.add(createAccountInstance(accountsType, callee));
+				}
+			}
+		} else {
+			// For incoming call, if no callee specified, assume self account is callee
+			if (direction == CommunicationDirection.INCOMING) {
+				addAttributeIfNotNull(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER_TO, this.selfAccountInstance.getAccount().getTypeSpecificID(), attributes);
+				recipientAccountsList.add(this.selfAccountInstance);
+			} else { // outgoing call without any callee
+				throw new IllegalArgumentException("Callee not provided for an outgoing call.");
+			}
+		}
+
+		// add attributes to artifact
+		attributes.addAll(otherAttributesList);
+		callLogArtifact.addAttributes(attributes);
+
+		// create relationships between caller/callees
+		try {
+			getSleuthkitCase().getCommunicationsManager().addRelationships(callerAccountInstance,
+					recipientAccountsList, callLogArtifact, Relationship.Type.CALL_LOG, startDateTime);
+		} catch (TskDataException ex) {
+			throw new TskCoreException(String.format("Failed to create Call log relationships between caller account = %s and callees = %s.",
+					callerAccountInstance.getAccount(), calleesStr), ex);
+		}
+
+		// post artifact
+		getSleuthkitCase().getBlackboard().postArtifact(callLogArtifact, getModuleName());
+
+		// return the artifact
+		return callLogArtifact;
+	}
+
+	/**
+	 * Converts a list of ids into a single comma separated string, skipping
+	 * null/empty entries. Returns an empty string for a null or empty list.
+	 */
+	private String addressListToString(Collection<String> addressList) {
+		// Null or empty input yields an empty string.
+		if (addressList == null || addressList.isEmpty()) {
+			return "";
+		}
+
+		StringBuilder joined = new StringBuilder();
+		for (String address : addressList) {
+			// Skip null/empty ids so no dangling separators are emitted.
+			if (!StringUtils.isEmpty(address)) {
+				if (joined.length() > 0) {
+					joined.append(", ");
+				}
+				joined.append(address);
+			}
+		}
+
+		return joined.toString();
+	}
+
+	/**
+	 * Checks if the given list of ids has at least one non-null non-blank id.
+	 *
+	 * @param idList List of string ids.
+	 *
+	 * @return false if the list has at least one non-null non-blank id,
+	 *         otherwise true.
+	 *
+	 */
+	private boolean isEffectivelyEmpty(Collection<String> idList) {
+
+		// A null or zero-length list is trivially empty.
+		if (idList == null || idList.isEmpty()) {
+			return true;
+		}
+		
+		// Non-empty list: look for at least one usable id.
+		for (String id: idList) {
+			if (!StringUtils.isEmpty(id))
+				return false;
+		}
+		
+		// All entries were null/blank.
+		return true;
+				
+	}
+	/**
+	 * Adds communication direction attribute to the list, if the direction is
+	 * known.
+	 *
+	 * @param direction  Direction to record; null or UNKNOWN is skipped.
+	 * @param attributes Attribute list to append to.
+	 */
+	private void addCommDirectionIfKnown(CommunicationDirection direction, Collection<BlackboardAttribute> attributes) {
+		// Null-guard added: a null direction previously passed the UNKNOWN
+		// check and then threw an NPE on getDisplayName().
+		if (direction != null && direction != CommunicationDirection.UNKNOWN) {
+			attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DIRECTION, getModuleName(), direction.getDisplayName()));
+		}
+	}
+
+	/**
+	 * Adds message read status attribute to the list, if the status is known.
+	 *
+	 * @param readStatus Read status to record; null or UNKNOWN is skipped.
+	 * @param attributes Attribute list to append to.
+	 */
+	private void addMessageReadStatusIfKnown(MessageReadStatus readStatus, Collection<BlackboardAttribute> attributes) {
+		// Null-guard added for robustness, matching addCommDirectionIfKnown:
+		// treat a null status the same as UNKNOWN.
+		if (readStatus != null && readStatus != MessageReadStatus.UNKNOWN) {
+			// TSK_READ_STATUS is stored as 1 for READ, 0 for UNREAD.
+			attributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_READ_STATUS, getModuleName(), (readStatus == MessageReadStatus.READ) ? 1 : 0));
+		}
+	}
+
+}
diff --git a/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/WebBrowserArtifactsHelper.java b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/WebBrowserArtifactsHelper.java
new file mode 100644
index 0000000000000000000000000000000000000000..5eb743af6a792a3d54e4879a725512ecc4ee060b
--- /dev/null
+++ b/bindings/java/src/org/sleuthkit/datamodel/blackboardutils/WebBrowserArtifactsHelper.java
@@ -0,0 +1,538 @@
+/*
+ * Sleuth Kit Data Model
+ *
+ * Copyright 2019 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.datamodel.blackboardutils;
+
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.StringTokenizer;
+import org.apache.commons.lang3.StringUtils;
+import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.Blackboard.BlackboardException;
+import org.sleuthkit.datamodel.BlackboardArtifact;
+import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.SleuthkitCase;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * Class to help ingest modules create Web Browser artifacts.
+ *
+ * These include bookmarks, cookies, downloads, history, and web form
+ * autofill data.
+ *
+ */
+public final class WebBrowserArtifactsHelper extends ArtifactHelperBase {
+
+	/**
+	 * Creates a WebBrowserArtifactsHelper.
+	 *
+	 * @param caseDb     Sleuthkit case db.
+	 * @param moduleName Name of module using the helper.
+	 * @param srcFile    Source file being processed by the module.
+	 *
+	 */
+	public WebBrowserArtifactsHelper(SleuthkitCase caseDb, String moduleName, AbstractFile srcFile) {
+		// All state lives in the base class; this helper adds only behavior.
+		super(caseDb, moduleName, srcFile);
+	}
+
+	/**
+	 * Adds a TSK_WEB_BOOKMARK artifact with no additional attributes.
+	 *
+	 * @param url          Bookmark URL, required.
+	 * @param title        Bookmark title, may be empty/null.
+	 * @param creationTime Date/time created, may be 0 if not available.
+	 * @param progName     Application/program that created bookmark, may be
+	 *                     empty/null.
+	 *
+	 * @return Bookmark artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebBookmark(String url, String title, long creationTime, String progName) throws TskCoreException, BlackboardException {
+		// Convenience overload: forward with an empty extra-attributes list.
+		return addWebBookmark(url, title, creationTime, progName, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_WEB_BOOKMARK artifact.
+	 *
+	 * @param url                 Bookmark URL, required.
+	 * @param title               Bookmark title, may be empty/null.
+	 * @param creationTime        Date/time created, may be 0 if not available.
+	 * @param progName            Application/program that created bookmark, may
+	 *                            be empty/null.
+	 * @param otherAttributesList Other attributes, may be an empty list.
+	 *
+	 * @return Bookmark artifact.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebBookmark(String url, String title, long creationTime, String progName,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		// create the artifact up front; attributes are attached below
+		BlackboardArtifact bookMarkArtifact = getAbstractFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_BOOKMARK);
+
+		// required attribute: the bookmarked URL
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
+		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url));
+
+		// optional attributes, added only when a value is present
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, creationTime, attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, title, attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, extractDomain(url), attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, progName, attributes);
+
+		// merge in caller-supplied attributes and attach everything at once
+		attributes.addAll(otherAttributesList);
+		bookMarkArtifact.addAttributes(attributes);
+
+		// post the artifact so listeners are notified
+		getSleuthkitCase().getBlackboard().postArtifact(bookMarkArtifact, getModuleName());
+
+		return bookMarkArtifact;
+	}
+
+	/**
+	 * Adds a TSK_WEB_COOKIE artifact with no additional attributes.
+	 *
+	 * @param url          Url of the site that created the cookie, required.
+	 * @param creationTime Create time of cookie, may be 0 if not available.
+	 * @param name         Cookie name, may be empty or null.
+	 * @param value        Cookie value, may be empty or null.
+	 * @param programName  Name of the application/program that created the
+	 *                     cookie, may be empty or null.
+	 *
+	 * @return WebCookie artifact
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebCookie(String url, long creationTime,
+			String name, String value, String programName) throws TskCoreException, BlackboardException {
+		// Convenience overload: forward with an empty extra-attributes list.
+		return addWebCookie(url, creationTime, name, value, programName, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_WEB_COOKIE artifact.
+	 *
+	 * @param url                 Url of the site that created the cookie,
+	 *                            required.
+	 * @param creationTime        Create time of cookie, may be 0 if not
+	 *                            available.
+	 * @param name                Cookie name, may be empty or null.
+	 * @param value               Cookie value, may be empty or null.
+	 * @param programName         Name of the application/program that created
+	 *                            the cookie, may be empty or null.
+	 * @param otherAttributesList Other attributes, may be an empty list.
+	 *
+	 * @return WebCookie artifact
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebCookie(String url,
+			long creationTime, String name, String value, String programName,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		// create the artifact up front; attributes are attached below
+		BlackboardArtifact cookieArtifact = getAbstractFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_COOKIE);
+
+		// required attribute: the originating URL
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
+		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url));
+
+		// optional attributes, added only when a value is present
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME, creationTime, attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, name, attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, value, attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, extractDomain(url), attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, programName, attributes);
+
+		// merge in caller-supplied attributes and attach everything at once
+		attributes.addAll(otherAttributesList);
+		cookieArtifact.addAttributes(attributes);
+
+		// post the artifact so listeners are notified
+		getSleuthkitCase().getBlackboard().postArtifact(cookieArtifact, getModuleName());
+
+		return cookieArtifact;
+	}
+
+	/**
+	 * Adds a TSK_WEB_DOWNLOAD artifact with no additional attributes.
+	 *
+	 * @param url         URL downloaded from, required.
+	 * @param startTime   Date/time downloaded, 0 if not available.
+	 * @param path        Path of downloaded file, required.
+	 * @param programName Program that initiated the download, may be empty or
+	 *                    null.
+	 *
+	 * @return Web download artifact created.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebDownload(String url, long startTime, String path, String programName) throws TskCoreException, BlackboardException {
+		// BUG FIX: was delegating as (path, startTime, url, ...), which swapped
+		// the URL and download path in the created artifact. The target
+		// signature is addWebDownload(url, startTime, path, programName, attrs).
+		return addWebDownload(url, startTime, path, programName, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_WEB_DOWNLOAD artifact.
+	 *
+	 * @param url                 URL downloaded from, required.
+	 * @param startTime           Date/time downloaded, 0 if not available.
+	 * @param path                Path of downloaded file, required.
+	 * @param programName         Program that initiated the download, may be
+	 *                            empty or null.
+	 * @param otherAttributesList Other attributes, may be an empty list.
+	 *
+	 * @return Web download artifact created.
+	 *
+	 * @throws TskCoreException    If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebDownload(String url, long startTime, String path, String programName,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		// create the artifact up front; attributes are attached below
+		BlackboardArtifact webDownloadArtifact = getAbstractFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_DOWNLOAD);
+
+		// required attributes: download path and source URL
+		Collection<BlackboardAttribute> attributes = new ArrayList<>();
+		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PATH, getModuleName(), path));
+		attributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url));
+
+		// optional attributes, added only when a value is present
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, startTime, attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, programName, attributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, extractDomain(url), attributes);
+
+		// merge in caller-supplied attributes and attach everything at once
+		attributes.addAll(otherAttributesList);
+		webDownloadArtifact.addAttributes(attributes);
+
+		// post the artifact so listeners are notified
+		getSleuthkitCase().getBlackboard().postArtifact(webDownloadArtifact, getModuleName());
+
+		return webDownloadArtifact;
+	}
+
+	/**
+	 * Adds a TSK_WEB_FORM_ADDRESS artifact.
+	 *
+	 * @param personName     Person name, required.
+	 * @param email          Email address, may be empty or null.
+	 * @param phoneNumber    Phone number, may be empty or null.
+	 * @param mailingAddress Mailing address, may be empty or null.
+	 * @param creationTime   Creation time, may be 0 if not available.
+	 * @param accessTime     Last access time, may be 0 if not available.
+	 * @param count          Use count, may be 0 if not available.
+	 *
+	 * @return Web form address artifact created.
+	 *
+	 * @throws TskCoreException	   If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebFormAddress(String personName, String email,
+			String phoneNumber, String mailingAddress,
+			long creationTime, long accessTime, int count) throws TskCoreException, BlackboardException {
+		// Convenience overload: delegates with an empty extra-attributes list.
+		return addWebFormAddress(personName, email, phoneNumber,
+				mailingAddress, creationTime, accessTime, count,
+				Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_WEB_FORM_ADDRESS artifact.
+	 *
+	 * @param personName          Person name, required.
+	 * @param email               Email address, may be empty or null.
+	 * @param phoneNumber         Phone number, may be empty or null.
+	 * @param mailingAddress      Mailing address, may be empty or null.
+	 * @param creationTime        Creation time, may be 0 if not available.
+	 * @param accessTime          Last access time, may be 0 if not available.
+	 * @param count               Use count, may be 0 if not available.
+	 * @param otherAttributesList Other attributes, may be an empty list.
+	 *
+	 * @return Web form address artifact created.
+	 *
+	 * @throws TskCoreException	   If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebFormAddress(String personName, String email,
+			String phoneNumber, String mailingAddress,
+			long creationTime, long accessTime, int count,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		// Create the artifact on the source file.
+		BlackboardArtifact artifact = getAbstractFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_FORM_ADDRESS);
+
+		// Build the attribute list: the person name is mandatory, the rest
+		// are included only when a meaningful value was supplied.
+		Collection<BlackboardAttribute> artifactAttributes = new ArrayList<>();
+		artifactAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), personName));
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_EMAIL, email, artifactAttributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER, phoneNumber, artifactAttributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_LOCATION, mailingAddress, artifactAttributes);
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, creationTime, artifactAttributes);
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, accessTime, artifactAttributes);
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, count, artifactAttributes);
+		artifactAttributes.addAll(otherAttributesList);
+
+		// Attach the attributes, then post so listeners are notified.
+		artifact.addAttributes(artifactAttributes);
+		getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName());
+
+		return artifact;
+	}
+
+	/**
+	 * Adds a TSK_WEB_FORM_AUTOFILL artifact.
+	 *
+	 * Convenience overload that supplies an empty list of extra attributes.
+	 *
+	 * @param name         Name of autofill field, required.
+	 * @param value        Value of autofill field, required.
+	 * @param creationTime Create date/time, may be 0 if not available.
+	 * @param accessTime   Last access date/time, may be 0 if not available.
+	 * @param count        Count of times used, may be 0 if not available.
+	 *
+	 * @return Web form autofill artifact created.
+	 *
+	 * @throws TskCoreException	   If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebFormAutofill(String name, String value,
+			long creationTime, long accessTime, int count) throws TskCoreException, BlackboardException {
+		return addWebFormAutofill(name, value, creationTime, accessTime,
+				count, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_WEB_FORM_AUTOFILL artifact.
+	 *
+	 * @param name                Name of autofill field, required.
+	 * @param value               Value of autofill field, required.
+	 * @param creationTime        Create date/time, may be 0 if not available.
+	 * @param accessTime          Last access date/time, may be 0 if not
+	 *                            available.
+	 * @param count               Count of times used, may be 0 if not
+	 *                            available.
+	 * @param otherAttributesList Other attributes, may be an empty list.
+	 *
+	 * @return Web form autofill artifact created.
+	 *
+	 * @throws TskCoreException	   If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebFormAutofill(String name, String value,
+			long creationTime, long accessTime, int count,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		// Create the artifact on the source file.
+		BlackboardArtifact artifact = getAbstractFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_FORM_AUTOFILL);
+
+		// Field name and value are mandatory; timestamps and count are
+		// recorded only when a non-zero value was supplied.
+		Collection<BlackboardAttribute> artifactAttributes = new ArrayList<>();
+		artifactAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME, getModuleName(), name));
+		artifactAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_VALUE, getModuleName(), value));
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_CREATED, creationTime, artifactAttributes);
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, accessTime, artifactAttributes);
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_COUNT, count, artifactAttributes);
+		artifactAttributes.addAll(otherAttributesList);
+
+		// Attach the attributes, then post so listeners are notified.
+		artifact.addAttributes(artifactAttributes);
+		getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName());
+
+		return artifact;
+	}
+
+	/**
+	 * Adds a TSK_WEB_HISTORY artifact.
+	 *
+	 * Convenience overload that supplies an empty list of extra attributes.
+	 *
+	 * @param url         Url visited, required.
+	 * @param accessTime  Last access time, may be 0 if not available.
+	 * @param referrer    Referrer, may be empty or null.
+	 * @param title       Website title, may be empty or null.
+	 * @param programName Application/program recording the history, may be
+	 *                    empty or null.
+	 *
+	 * @return Web history artifact created.
+	 *
+	 * @throws TskCoreException	   If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebHistory(String url, long accessTime,
+			String referrer, String title, String programName) throws TskCoreException, BlackboardException {
+		return addWebHistory(url, accessTime, referrer, title,
+				programName, Collections.emptyList());
+	}
+
+	/**
+	 * Adds a TSK_WEB_HISTORY artifact.
+	 *
+	 * @param url                 Url visited, required.
+	 * @param accessTime          Last access time, may be 0 if not available.
+	 * @param referrer            Referrer, may be empty or null.
+	 * @param title               Website title, may be empty or null.
+	 * @param programName         Application/program recording the history, may
+	 *                            be empty or null.
+	 * @param otherAttributesList Other attributes, may be an empty list.
+	 *
+	 * @return Web history artifact created.
+	 *
+	 * @throws TskCoreException	   If there is an error creating the artifact.
+	 * @throws BlackboardException If there is a problem posting the artifact.
+	 */
+	public BlackboardArtifact addWebHistory(String url, long accessTime,
+			String referrer, String title, String programName,
+			Collection<BlackboardAttribute> otherAttributesList) throws TskCoreException, BlackboardException {
+
+		// Create the artifact on the source file.
+		BlackboardArtifact artifact = getAbstractFile().newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_WEB_HISTORY);
+
+		// The URL is mandatory; everything else is added only when a
+		// meaningful value was supplied. The domain is derived from the URL.
+		Collection<BlackboardAttribute> artifactAttributes = new ArrayList<>();
+		artifactAttributes.add(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL, getModuleName(), url));
+		addAttributeIfNotZero(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED, accessTime, artifactAttributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_TITLE, title, artifactAttributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_REFERRER, referrer, artifactAttributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME, programName, artifactAttributes);
+		addAttributeIfNotNull(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN, extractDomain(url), artifactAttributes);
+		artifactAttributes.addAll(otherAttributesList);
+
+		// Attach the attributes, then post so listeners are notified.
+		artifact.addAttributes(artifactAttributes);
+		getSleuthkitCase().getBlackboard().postArtifact(artifact, getModuleName());
+
+		return artifact;
+	}
+
+	// TBD: this is duplicated in Autopsy.
+	// We should move this to new Util class in TSK, and have Autopsy delegate to it.
+	/**
+	 * Attempt to extract the domain from a URL. Will start by using the
+	 * built-in URL class, and if that fails will try to extract it manually.
+	 *
+	 * @param urlString The URL to extract the domain from
+	 *
+	 * @return empty string if no domain name was found
+	 */
+	private static String extractDomain(String urlString) {
+		if (urlString == null) {
+			return "";
+		}
+
+		String host = null;
+		try {
+			// Let the JDK parse well-formed URLs first.
+			host = new URL(urlString).getHost();
+		} catch (MalformedURLException ex) {
+			// Not a valid URL - fall through to the manual parser below.
+		}
+
+		// isBlank() covers both the null (parse failure) and empty/blank
+		// host cases, so one check replaces the original two.
+		if (StringUtils.isBlank(host)) {
+			return getBaseDomain(urlString);
+		}
+		return host;
+	}
+
+	/**
+	 * Attempt to manually extract the domain from a URL.
+	 *
+	 * @param url URL string to parse; may lack a protocol prefix.
+	 *
+	 * @return empty string if no domain could be found
+	 */
+	private static String getBaseDomain(String url) {
+		// Drop any leading protocol (e.g. "http://").
+		String cleanUrl = url.replaceFirst(".*:\\/\\/", "");
+
+		// Keep only the host portion, i.e. everything before the first slash.
+		String[] pathParts = cleanUrl.split("\\/");
+		String host = (pathParts.length > 0) ? pathParts[0] : cleanUrl;
+
+		// Keep only the final two dot-separated labels of the host.
+		// NOTE(review): this is naive for multi-part TLDs such as "co.uk";
+		// kept as-is to preserve existing behavior.
+		StringTokenizer labelTokenizer = new StringTokenizer(host, ".");
+		int totalLabels = labelTokenizer.countTokens();
+		StringBuilder baseBuilder = new StringBuilder();
+		for (int index = 0; index < totalLabels; index++) {
+			String label = labelTokenizer.nextToken();
+			int remaining = totalLabels - index;
+			if (remaining < 3) {
+				if (baseBuilder.length() > 0) {
+					baseBuilder.append('.');
+				}
+				baseBuilder.append(label);
+			}
+		}
+		String base = baseBuilder.toString();
+
+		// Reject results containing characters that cannot occur in a domain.
+		if (base.matches(".*[~`!@#$%^&\\*\\(\\)\\+={}\\[\\];:\\?<>,/ ].*")) {
+			return "";
+		}
+
+		// verify that the base domain actually has a '.', details JIRA-4609
+		if (!base.contains(".")) {
+			return "";
+		}
+
+		return base;
+	}
+}
diff --git a/bindings/java/test/org/sleuthkit/datamodel/timeline/EventTypeFilterTest.java b/bindings/java/test/org/sleuthkit/datamodel/timeline/EventTypeFilterTest.java
index a581c2ee29b77ac84cdf74b37ef136eacb68b93f..c4b700ba5516a3fcb65ad8094d6d8ed99b307cce 100644
--- a/bindings/java/test/org/sleuthkit/datamodel/timeline/EventTypeFilterTest.java
+++ b/bindings/java/test/org/sleuthkit/datamodel/timeline/EventTypeFilterTest.java
@@ -35,11 +35,11 @@ public class EventTypeFilterTest {
 	public void testGetEventType() {
 		System.out.println("getEventType");
 		EventTypeFilter instance = new EventTypeFilter(TimelineEventType.ROOT_EVENT_TYPE);
-		assertEquals(TimelineEventType.ROOT_EVENT_TYPE, instance.getEventType());
+		assertEquals(TimelineEventType.ROOT_EVENT_TYPE, instance.getRootEventType());
 		instance = new EventTypeFilter(TimelineEventType.FILE_SYSTEM);
-		assertEquals(TimelineEventType.FILE_SYSTEM, instance.getEventType());
+		assertEquals(TimelineEventType.FILE_SYSTEM, instance.getRootEventType());
 		instance = new EventTypeFilter(TimelineEventType.MESSAGE);
-		assertEquals(TimelineEventType.MESSAGE, instance.getEventType());
+		assertEquals(TimelineEventType.MESSAGE, instance.getRootEventType());
 	}
 
 	/**
diff --git a/configure.ac b/configure.ac
index 5b30b7d2078c8a8e5f2651700bd0b3849750b642..b20e6378f53ed62da5a594c14a57ee6074052945 100644
--- a/configure.ac
+++ b/configure.ac
@@ -4,7 +4,7 @@ dnl Process this file with autoconf to produce a configure script.
 
 AC_PREREQ(2.59)
 
-AC_INIT(sleuthkit, 4.6.7)
+AC_INIT(sleuthkit, 4.7.0)
 m4_include([m4/ax_pthread.m4])
 dnl include the version from 1.12.1. This will work for
 m4_include([m4/cppunit.m4])
@@ -13,11 +13,12 @@ m4_include([m4/ac_prog_javac_works.m4])
 m4_include([m4/ac_prog_javac.m4])
 m4_include([m4/ac_prog_java_works.m4])
 m4_include([m4/ac_prog_java.m4])
+m4_include([m4/ax_cxx_compile_stdcxx.m4])
 
 AC_CONFIG_SRCDIR([tsk/base/tsk_base.h])
 AC_CONFIG_HEADERS([tsk/tsk_config.h])
 AC_CONFIG_AUX_DIR(config)
-AM_INIT_AUTOMAKE([foreign])
+AM_INIT_AUTOMAKE([foreign tar-ustar])
 AM_PATH_CPPUNIT(1.12.1)
 AM_CONDITIONAL([CPPUNIT],[test "x$no_cppunit" = x])
 AM_PROG_LIBTOOL
@@ -77,6 +78,8 @@ dnl AC_CHECK_FUNCS([dup2 gethostname isascii iswprint memset munmap regcomp sele
 AC_CHECK_FUNCS([ishexnumber err errx warn warnx vasprintf getrusage])
 AC_CHECK_FUNCS([strlcpy strlcat])
 
+AX_CXX_COMPILE_STDCXX([11], [noext], [mandatory])
+
 AX_PTHREAD([
     AC_DEFINE(HAVE_PTHREAD,1,[Define if you have POSIX threads libraries and header files.])
     CLIBS="$PTHREAD_LIBS $LIBS"
diff --git a/debian/changelog b/debian/changelog
index 3802a67b7411953aadf3a86f8516a0dd2b82ba81..0d09c5daf67f69edc04998773f8b1a927fd042e3 100644
--- a/debian/changelog
+++ b/debian/changelog
@@ -1,4 +1,4 @@
-sleuthkit-java (4.6.7-1) unstable; urgency=medium
+sleuthkit-java (4.7.0-1) unstable; urgency=medium
 
   * Initial release (Closes: #nnnn)  <nnnn is the bug number of your ITP>
 
diff --git a/debian/sleuthkit-java.install b/debian/sleuthkit-java.install
index 6decacbed02f9d3439f8c42c05e5e2d1b999782d..fae334721d480b7b6bceee8fd712528460364408 100644
--- a/debian/sleuthkit-java.install
+++ b/debian/sleuthkit-java.install
@@ -1,3 +1,3 @@
 bindings/java/lib/sqlite-jdbc-3.25.2.jar /usr/share/java
-bindings/java/dist/sleuthkit-4.6.7.jar /usr/share/java
+bindings/java/dist/sleuthkit-4.7.0.jar /usr/share/java
 
diff --git a/m4/ax_cxx_compile_stdcxx.m4 b/m4/ax_cxx_compile_stdcxx.m4
index 5032bba8091d5d1074f4509b4c47b38a66389c6b..15f795abcc77e2c4859bf8f1a00a3c85473467d4 100644
--- a/m4/ax_cxx_compile_stdcxx.m4
+++ b/m4/ax_cxx_compile_stdcxx.m4
@@ -33,19 +33,19 @@
 #   Copyright (c) 2014, 2015 Google Inc.; contributed by Alexey Sokolov <sokolov@google.com>
 #   Copyright (c) 2015 Paul Norman <penorman@mac.com>
 #   Copyright (c) 2015 Moritz Klammler <moritz@klammler.eu>
-#   Copyright (c) 2016 Krzesimir Nowak <qdlacz@gmail.com>
+#   Copyright (c) 2016, 2018 Krzesimir Nowak <qdlacz@gmail.com>
+#   Copyright (c) 2019 Enji Cooper <yaneurabeya@gmail.com>
 #
 #   Copying and distribution of this file, with or without modification, are
 #   permitted in any medium without royalty provided the copyright notice
 #   and this notice are preserved.  This file is offered as-is, without any
 #   warranty.
 
-#serial 7
+#serial 11
 
 dnl  This macro is based on the code from the AX_CXX_COMPILE_STDCXX_11 macro
 dnl  (serial version number 13).
 
-AX_REQUIRE_DEFINED([AC_MSG_WARN])
 AC_DEFUN([AX_CXX_COMPILE_STDCXX], [dnl
   m4_if([$1], [11], [ax_cxx_compile_alternatives="11 0x"],
         [$1], [14], [ax_cxx_compile_alternatives="14 1y"],
@@ -61,14 +61,6 @@ AC_DEFUN([AX_CXX_COMPILE_STDCXX], [dnl
         [m4_fatal([invalid third argument `$3' to AX_CXX_COMPILE_STDCXX])])
   AC_LANG_PUSH([C++])dnl
   ac_success=no
-  AC_CACHE_CHECK(whether $CXX supports C++$1 features by default,
-  ax_cv_cxx_compile_cxx$1,
-  [AC_COMPILE_IFELSE([AC_LANG_SOURCE([_AX_CXX_COMPILE_STDCXX_testbody_$1])],
-    [ax_cv_cxx_compile_cxx$1=yes],
-    [ax_cv_cxx_compile_cxx$1=no])])
-  if test x$ax_cv_cxx_compile_cxx$1 = xyes; then
-    ac_success=yes
-  fi
 
   m4_if([$2], [noext], [], [dnl
   if test x$ac_success = xno; then
@@ -139,7 +131,6 @@ AC_DEFUN([AX_CXX_COMPILE_STDCXX], [dnl
               [define if the compiler supports basic C++$1 syntax])
   fi
   AC_SUBST(HAVE_CXX$1)
-  m4_if([$1], [17], [AC_MSG_WARN([C++17 is not yet standardized, so the checks may change in incompatible ways anytime])])
 ])
 
 
@@ -199,11 +190,13 @@ namespace cxx11
 
     struct Base
     {
+      virtual ~Base() {}
       virtual void f() {}
     };
 
     struct Derived : public Base
     {
+      virtual ~Derived() override {}
       virtual void f() override {}
     };
 
@@ -587,20 +580,12 @@ m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_17], [[
 
 #error "This is not a C++ compiler"
 
-#elif __cplusplus <= 201402L
+#elif __cplusplus < 201703L
 
 #error "This is not a C++17 compiler"
 
 #else
 
-#if defined(__clang__)
-  #define REALLY_CLANG
-#else
-  #if defined(__GNUC__)
-    #define REALLY_GCC
-  #endif
-#endif
-
 #include <initializer_list>
 #include <utility>
 #include <type_traits>
@@ -608,16 +593,12 @@ m4_define([_AX_CXX_COMPILE_STDCXX_testbody_new_in_17], [[
 namespace cxx17
 {
 
-#if !defined(REALLY_CLANG)
   namespace test_constexpr_lambdas
   {
 
-    // TODO: test it with clang++ from git
-
     constexpr int foo = [](){return 42;}();
 
   }
-#endif // !defined(REALLY_CLANG)
 
   namespace test::nested_namespace::definitions
   {
@@ -852,12 +833,9 @@ namespace cxx17
 
   }
 
-#if !defined(REALLY_CLANG)
   namespace test_template_argument_deduction_for_class_templates
   {
 
-    // TODO: test it with clang++ from git
-
     template <typename T1, typename T2>
     struct pair
     {
@@ -876,7 +854,6 @@ namespace cxx17
     }
 
   }
-#endif // !defined(REALLY_CLANG)
 
   namespace test_non_type_auto_template_parameters
   {
@@ -890,12 +867,9 @@ namespace cxx17
 
   }
 
-#if !defined(REALLY_CLANG)
   namespace test_structured_bindings
   {
 
-    // TODO: test it with clang++ from git
-
     int arr[2] = { 1, 2 };
     std::pair<int, int> pr = { 1, 2 };
 
@@ -927,14 +901,10 @@ namespace cxx17
     const auto [ x3, y3 ] = f3();
 
   }
-#endif // !defined(REALLY_CLANG)
 
-#if !defined(REALLY_CLANG)
   namespace test_exception_spec_type_system
   {
 
-    // TODO: test it with clang++ from git
-
     struct Good {};
     struct Bad {};
 
@@ -952,7 +922,6 @@ namespace cxx17
     static_assert (std::is_same_v<Good, decltype(f(g1, g2))>);
 
   }
-#endif // !defined(REALLY_CLANG)
 
   namespace test_inline_variables
   {
@@ -977,6 +946,6 @@ namespace cxx17
 
 }  // namespace cxx17
 
-#endif  // __cplusplus <= 201402L
+#endif  // __cplusplus < 201703L
 
-]])
+]])
\ No newline at end of file
diff --git a/packages/sleuthkit.spec b/packages/sleuthkit.spec
index 27fed9749c69b73a94f2d90fefb30bd72db51216..5ad496c55079cfc061bef7ca4a61d33719d038c3 100644
--- a/packages/sleuthkit.spec
+++ b/packages/sleuthkit.spec
@@ -1,5 +1,5 @@
 Name:		sleuthkit	
-Version:	4.6.7
+Version:	4.7.0
 Release:	1%{?dist}
 Summary:	The Sleuth Kit (TSK) is a library and collection of command line tools that allow you to investigate volume and file system data.	
 
diff --git a/tests/runtests.sh b/tests/runtests.sh
index 8a98ac66cff58115662a9e34fe002261a4196cdd..b1bb073d847c45e7fa2b7d1f1eabe8bab51da2ec 100755
--- a/tests/runtests.sh
+++ b/tests/runtests.sh
@@ -1,5 +1,12 @@
 #!/bin/bash
 
+# NOTE: This script is not currently being used anywhere.
+# It currently runs fs_thread_test on a set of images that
+# are not public
+#
+# This could probably be renamed to something with threadtest in the name
+
+
 EXIT_SUCCESS=0;
 EXIT_FAILURE=1;
 EXIT_IGNORE=77;
diff --git a/tests/test_libraries.sh b/tests/test_libraries.sh
index a20dc0104056e6c9ee34685a6492a1f8a1bdd8a6..7c5fb1958c9148d4591e90f9f8e5ba36ee67361f 100755
--- a/tests/test_libraries.sh
+++ b/tests/test_libraries.sh
@@ -15,17 +15,22 @@ if [ ! -d "./data" ];then
 	fi
 fi
 
-
 #Download from images from google drive
 ggID=("imageformat_mmls_1.vhd","15vEesL8xTMFSo-uLA5dsx3puVaKcGEyw" "imageformat_mmls_1.vmdk","1uLC0FjUWdl3uLCi1QaZ8O72q281jtzIu" "imageformat_mmls_1.E01","1YBCh3yP4Ny7eads4TC-dL3ycaNNrlzWo")    
 for i in ${ggID[@]};do
 	name=${i%,*}
-	id=${i#*,}
-	COOKIES=$(mktemp)
-	CODE=$(wget --save-cookies $COOKIES --keep-session-cookies --no-check-certificate "https://docs.google.com/uc?export=download&id=${id}" -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/Code: \1\n/p')
-	CODE=$(echo $CODE | rev | cut -d: -f1 | rev | xargs)
-	wget --load-cookies $COOKIES "https://docs.google.com/uc?export=download&confirm=${CODE}&id=${id}" -O ./data/${name}
-	rm -f $COOKIES
+  if [ ! -f "./data/${name}" ]; then
+    id=${i#*,}
+    COOKIES=$(mktemp)
+    CODE=$(wget --save-cookies $COOKIES --keep-session-cookies --no-check-certificate "https://docs.google.com/uc?export=download&id=${id}" -O- | sed -rn 's/.*confirm=([0-9A-Za-z_]+).*/Code: \1\n/p')
+    CODE=$(echo $CODE | rev | cut -d: -f1 | rev | xargs)
+    wget --load-cookies $COOKIES "https://docs.google.com/uc?export=download&confirm=${CODE}&id=${id}" -O ./data/${name}
+    rm -f $COOKIES
+    if [ ! -f "./data/${name}" ]; then
+      echo "Error downloading data (${name})"
+      exit $EXIT_FAILURE
+    fi 
+  fi
 done
 
 #exits with FAILURE status if the command failed
@@ -41,31 +46,27 @@ checkExitStatus (){
 #command to check on the images
 mmls_cmd=../tools/vstools/mmls
 
-
 #saving the list of supported images to dev variable
-imgFormatList=$mmls_cmd -i list 2>&1 > /dev/null | sed '1d' |awk '{print $1}'
+imgFormatList=$($mmls_cmd -i list 2>&1 > /dev/null | sed '1d' |awk '{print $1}')
 
 # Verify mmls does not return an error with various formats. 
-if [[ " ${imgFormatList[@]} " =~ " ${vmdk} " ]]; then
+if [[ "${imgFormatList}" =~ "vmdk" ]]; then
 	$mmls_cmd ./data/imageformat_mmls_1.vmdk > /dev/null
 	checkExitStatus $? "vmdk"
 else
 	echo "Tools not compiled with libvmdk"
-	exit $EXIT_FAILURE 
 fi
 
-if [[ " ${imgFormatList[@]} " =~ " ${vhd} " ]]; then
+if [[ "${imgFormatList}" =~ "vhd" ]]; then
 	$mmls_cmd ./data/imageformat_mmls_1.vhd > /dev/null
 	checkExitStatus $? "vhd"
 else
 	echo "Tools not compiled with libvhdi"
-	exit $EXIT_FAILURE
 fi
 
-if [[ " ${imgFormatList[@]}" =~ "${ewf} " ]]; then
+if [[ "${imgFormatList}" =~ "ewf" ]]; then
 	$mmls_cmd ./data/imageformat_mmls_1.E01 > /dev/null
 	checkExitStatus $? "ewf"
 else
 	echo "Tools not compiled with libewf"
-	exit $EXIT_FAILURE
 fi
diff --git a/tools/logicalimager/DriveUtil.cpp b/tools/logicalimager/DriveUtil.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..81a999eff40407c5b3ce990acfbd074433f3894d
--- /dev/null
+++ b/tools/logicalimager/DriveUtil.cpp
@@ -0,0 +1,527 @@
+/*
+** The Sleuth Kit
+**
+** Brian Carrier [carrier <at> sleuthkit [dot] org]
+** Copyright (c) 2010-2019 Brian Carrier.  All Rights reserved
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+/**
+* \file DriveUtil.cpp
+* Contains C++ code that implements the DriveUtil class.
+*/
+
+#include <iostream>
+#include <string>
+#include <list>
+#include <Wbemidl.h>
+#include <comutil.h>
+
+#include "DriveUtil.h"
+#include "ReportUtil.h"
+#include "TskHelper.h"
+#include "tsk/auto/tsk_auto.h"
+
+/*
+* Test if Current Working Directory file system is FAT.
+*
+* @param [out] cwd Set to the current working directory.
+* @returns true if current working directory file system is FAT, false otherwise.
+*/
+bool DriveUtil::cwdIsFAT(std::wstring &cwd) {
+    wchar_t *buffer;
+
+    if ((buffer = _wgetcwd(NULL, 0)) == NULL) {
+        // Fixed: was "ReportUtil::ReportUtil::consoleOutput" - a redundant
+        // injected-class-name qualifier, inconsistent with every other call.
+        // NOTE(review): handleExit is presumed not to return; otherwise
+        // buffer (NULL) would be dereferenced below - confirm.
+        ReportUtil::consoleOutput(stderr, "Error: _wgetcwd failed\n");
+        ReportUtil::handleExit(1);
+    }
+
+    cwd = buffer;
+
+    // Extract the drive designator (e.g. L"C:") from the path.
+    wchar_t drive[3];
+    wcsncpy_s(drive, 3, buffer, 2);
+    drive[2] = 0;
+    free(buffer);
+    return driveIsFAT(drive);
+}
+
+/*
+* Test if drive is a FAT file system
+*
+* @param drive Drive to get, must be of the format "C:"
+* @return true if drive is FAT, false otherwise.
+*/
+bool DriveUtil::driveIsFAT(wchar_t *drive) {
+    // Open the drive as a raw device path, e.g. "\\.\C:".
+    std::wstring devicePath = std::wstring(L"\\\\.\\") + drive;
+    const TSK_TCHAR *image = (TSK_TCHAR *)devicePath.c_str();
+
+    TSK_IMG_INFO *img = TskHelper::addFSFromImage(image);
+    if (img == NULL) {
+        return false;
+    }
+
+    // Scan every file system found on the drive for any FAT variant.
+    bool foundFAT = false;
+    const std::list<TSK_FS_INFO *> fsList = TskHelper::getInstance().getFSInfoList();
+    for (std::list<TSK_FS_INFO *>::const_iterator iter = fsList.begin(); iter != fsList.end(); ++iter) {
+        TSK_FS_TYPE_ENUM fsType = (*iter)->ftype;
+        if (fsType == TSK_FS_TYPE_FAT12 ||
+            fsType == TSK_FS_TYPE_FAT16 ||
+            fsType == TSK_FS_TYPE_FAT32 ||
+            fsType == TSK_FS_TYPE_FAT_DETECT) {
+            foundFAT = true;
+            break;
+        }
+    }
+    img->close(img);
+    TskHelper::getInstance().reset();
+    return foundFAT;
+}
+
+
+/**
+* checkDriveForBitlocker: checks if the given drive has BitLocker encrypted
+*
+* @param driveLetter drive to check, for example C:
+*
+* @returns  0  if the drive is not encrypted
+*           1  if the drive is Bitlocker encrypted
+*           -1 if error
+*
+*/
+int DriveUtil::checkDriveForBitlocker(const std::string &driveLetter) {
+
+    IWbemLocator *pWbemLocator = NULL;
+    IWbemServices *pWbemServices = NULL;
+
+    long rc = 0;
+
+    std::wstring wsBitLockerNamespace = L"ROOT\\CIMV2\\Security\\MicrosoftVolumeEncryption";
+
+    // Init WMI with the requisite namespace. This may fail on some versions of Windows, if Bitlocker is not installed.
+    rc = wmi_init(wsBitLockerNamespace, &pWbemLocator, &pWbemServices);
+    if (0 != rc) {
+        if (WBEM_E_INVALID_NAMESPACE == rc) {
+            std::cerr << " Bitlocker is not installed." << std::endl;
+            return 0;
+        }
+        else {
+            std::cerr << "Failed to connect to WMI namespace = " << TskHelper::toNarrow(wsBitLockerNamespace) << std::endl;
+            return -1;
+        }
+    }
+
+    // Query WMI for the EncryptableVolume entry matching this drive letter.
+    HRESULT hres;
+    IEnumWbemClassObject* pEnumerator = NULL;
+
+    int returnStatus = 0;
+    std::wstring wstrQuery = L"SELECT * FROM Win32_EncryptableVolume where driveletter = '";
+    wstrQuery += TskHelper::toWide(driveLetter);
+    wstrQuery += L"'";
+
+    hres = pWbemServices->ExecQuery(
+        bstr_t("WQL"),
+        bstr_t(wstrQuery.c_str()),
+        WBEM_FLAG_FORWARD_ONLY | WBEM_FLAG_RETURN_IMMEDIATELY,
+        NULL,
+        &pEnumerator);
+
+    if (FAILED(hres)) {
+        std::cerr << "WMI Query for Win32_EncryptableVolume failed. "
+            << "Error code = 0x"
+            << std::hex << hres << std::endl;
+        wmi_close(&pWbemLocator, &pWbemServices);
+        return -1;
+    }
+    else {
+        IWbemClassObject *pclsObj;
+        ULONG uReturn = 0;
+        while (pEnumerator) {
+            hres = pEnumerator->Next(WBEM_INFINITE, 1, &pclsObj, &uReturn);
+            if (0 == uReturn) break;
+
+            VARIANT vtProp;
+            VariantInit(&vtProp);  // fixed: clear of an uninitialized VARIANT when Get() fails
+            hres = pclsObj->Get(_bstr_t(L"EncryptionMethod"), 0, &vtProp, 0, 0);
+
+            // WBEM_E_NOT_FOUND means Bitlocker is not installed; an
+            // EncryptionMethod of 0 means the volume is not encrypted.
+            if (WBEM_E_NOT_FOUND != hres) {
+                if (0 != vtProp.uintVal) {
+                    returnStatus = 1;
+                }
+            }
+            VariantClear(&vtProp);
+            pclsObj->Release();  // fixed: each enumerated object was leaked
+        }
+    }
+    pEnumerator->Release();
+
+    wmi_close(&pWbemLocator, &pWbemServices);
+
+    return returnStatus;
+}
+
+/**
+* isDriveLocked: checks if the given drive is BitLocker locked
+*
+* @param driveLetter drive to check, for example C:
+*
+* @returns  0  if the drive is not locked
+*           1  if the drive is Bitlocker locked
+*           -1 if error
+*
+*/
+int DriveUtil::isDriveLocked(const std::string &driveLetter) {
+
+    IWbemLocator *pWbemLocator = NULL;
+    IWbemServices *pWbemServices = NULL;
+
+    long rc = 0;
+
+    std::wstring wsBitLockerNamespace = L"ROOT\\CIMV2\\Security\\MicrosoftVolumeEncryption";
+
+    // Init WMI with the requisite namespace. This may fail on some versions of Windows, if Bitlocker is not installed.
+    rc = wmi_init(wsBitLockerNamespace, &pWbemLocator, &pWbemServices);
+    if (0 != rc) {
+        if (WBEM_E_INVALID_NAMESPACE == rc) {
+            std::cerr << " Bitlocker is not installed." << std::endl;
+            return 0;
+        }
+        else {
+            std::cerr << "Failed to connect to WMI namespace = " << TskHelper::toNarrow(wsBitLockerNamespace) << std::endl;
+            return -1;
+        }
+    }
+
+    // Query WMI for the EncryptableVolume entry matching this drive letter.
+    HRESULT hres;
+    IEnumWbemClassObject* pEnumerator = NULL;
+
+    int returnStatus = 0;
+    std::wstring wstrQuery = L"SELECT * FROM Win32_EncryptableVolume where driveletter = '";
+    wstrQuery += TskHelper::toWide(driveLetter);
+    wstrQuery += L"'";
+
+    hres = pWbemServices->ExecQuery(
+        bstr_t("WQL"),
+        bstr_t(wstrQuery.c_str()),
+        WBEM_FLAG_FORWARD_ONLY | WBEM_FLAG_RETURN_IMMEDIATELY,
+        NULL,
+        &pEnumerator);
+
+    if (FAILED(hres)) {
+        std::cerr << "WMI Query for Win32_EncryptableVolume failed. "
+            << "Error code = 0x"
+            << std::hex << hres << std::endl;
+        wmi_close(&pWbemLocator, &pWbemServices);
+        return -1;
+    }
+    else {
+        IWbemClassObject *pclsObj;
+        ULONG uReturn = 0;
+        while (pEnumerator) {
+            hres = pEnumerator->Next(WBEM_INFINITE, 1, &pclsObj, &uReturn);
+            if (0 == uReturn) break;
+
+            VARIANT vtProp;
+            VariantInit(&vtProp);  // fixed: clear of an uninitialized VARIANT when Get() fails
+            hres = pclsObj->Get(_bstr_t(L"ProtectionStatus"), 0, &vtProp, 0, 0);
+
+            // A ProtectionStatus of 2 is treated as "locked".
+            if (WBEM_E_NOT_FOUND != hres) {
+                if (2 == vtProp.uintVal) {
+                    returnStatus = 1;
+                }
+            }
+            VariantClear(&vtProp);
+            pclsObj->Release();  // fixed: each enumerated object was leaked
+        }
+    }
+    pEnumerator->Release();
+
+    wmi_close(&pWbemLocator, &pWbemServices);
+
+    return returnStatus;
+}
+
+/**
+* wmi_init: Initialize WMI
+*
+* @param input wmiNamespace - wmi namespace to open
+* @returns  0 on success
+*                        WBEM_E_INVALID_NAMESPACE, if namespace is not found
+*           -1 if error
+*
+* Ref: https://msdn.microsoft.com/en-us/library/aa390423(VS.85).aspx
+*
+*/
+long DriveUtil::wmi_init(const std::wstring& wmiNamespace, IWbemLocator **ppWbemLocator, IWbemServices **ppWbemServices) {
+    HRESULT hres;
+
+    // Step 1: Initialize COM.
+
+    hres = CoInitializeEx(0, COINIT_MULTITHREADED);
+    if (FAILED(hres)) {
+        ReportUtil::consoleOutput(stderr, "wmi_init: Failed to initialize COM library. Error code = %#X\n", hres);
+        return -1;                  // Program has failed.
+    }
+
+    // Step 2: Set general COM security levels
+    hres = CoInitializeSecurity(
+        NULL,
+        -1,                          // COM authentication
+        NULL,                        // Authentication services
+        NULL,                        // Reserved
+        RPC_C_AUTHN_LEVEL_DEFAULT,   // Default authentication
+        RPC_C_IMP_LEVEL_IMPERSONATE, // Default Impersonation
+        NULL,                        // Authentication info
+        EOAC_NONE,                   // Additional capabilities
+        NULL                         // Reserved
+    );
+
+    if (FAILED(hres)) {
+        ReportUtil::consoleOutput(stderr, "wmi_init: Failed to initialize security. Error code = %#X\n", hres);
+        CoUninitialize();
+        return -1;                    // Program has failed.
+    }
+
+    // Step 3: Obtain the initial locator to WMI
+    hres = CoCreateInstance(
+        CLSID_WbemLocator,
+        0,
+        CLSCTX_INPROC_SERVER,
+        IID_IWbemLocator, (LPVOID *)ppWbemLocator);
+
+    if (FAILED(hres))
+    {
+        ReportUtil::consoleOutput(stderr, "wmi_init: Failed to create IWbemLocator object. Err code = %#X\n", hres);
+        CoUninitialize();
+        return -1;                 // Program has failed.
+    }
+
+    // Step 4: Connect to WMI through the IWbemLocator::ConnectServer method
+    // Connect to the given namespace with
+    // the current user and obtain pointer pSvc
+    // to make IWbemServices calls.
+    hres = (*ppWbemLocator)->ConnectServer(
+        _bstr_t(wmiNamespace.c_str()), // Object path of WMI namespace
+        NULL,                    // User name. NULL = current user
+        NULL,                    // User password. NULL = current
+        0,                       // Locale. NULL indicates current
+        NULL,                    // Security flags.
+        0,                       // Authority (e.g. Kerberos)
+        0,                       // Context object
+        ppWbemServices                    // pointer to IWbemServices proxy
+    );
+
+    if (FAILED(hres)) {
+        if (WBEM_E_INVALID_NAMESPACE != hres) {
+            ReportUtil::consoleOutput(stderr, "wmi_init: Could not connect to namespace %s, Error = %s\n",
+                TskHelper::toNarrow(wmiNamespace).c_str(), ReportUtil::GetErrorStdStr(hres).c_str());
+        }
+
+        (*ppWbemLocator)->Release();
+        CoUninitialize();
+
+        return (WBEM_E_INVALID_NAMESPACE == hres) ? hres : -1;
+    }
+
+    // Step 5: Set security levels on the proxy
+    hres = CoSetProxyBlanket(
+        *ppWbemServices,                        // Indicates the proxy to set
+        RPC_C_AUTHN_WINNT,           // RPC_C_AUTHN_xxx
+        RPC_C_AUTHZ_NONE,            // RPC_C_AUTHZ_xxx
+        NULL,                        // Server principal name
+        RPC_C_AUTHN_LEVEL_CALL,      // RPC_C_AUTHN_LEVEL_xxx
+        RPC_C_IMP_LEVEL_IMPERSONATE, // RPC_C_IMP_LEVEL_xxx
+        NULL,                        // client identity
+        EOAC_NONE                    // proxy capabilities
+    );
+
+    if (FAILED(hres)) {
+        ReportUtil::consoleOutput(stderr, "wmi_init: Could not set proxy blanket. Error code = %#X\n", hres);
+        (*ppWbemServices)->Release();
+        (*ppWbemLocator)->Release();
+        CoUninitialize();
+        return -1;               // Program has failed.
+    }
+    return 0;
+}
+
+/**
+* wmi_close: closes WMI
+*
+* @returns  0 on success
+*           (note: the current implementation always returns 0)
+*
+*/
+int DriveUtil::wmi_close(IWbemLocator **ppWbemLocator, IWbemServices **ppWbemServices) {
+    // Cleanup
+    // ========
+
+    (*ppWbemServices)->Release();
+    (*ppWbemLocator)->Release();
+    CoUninitialize();
+
+    (*ppWbemServices) = NULL;
+    (*ppWbemLocator) = NULL;
+
+    return 0;
+}
+
+/**
+* checkDriveForLDM: checks if the given drive is an LDM disk
+*
+* @param input driveLetter drive to check, for example C:
+*
+* @returns  0 if the drive is NOT an LDM disk
+*           1 if the drive IS an LDM disk
+*           -1 if error, or if drive not found
+*
+*/
+int DriveUtil::checkDriveForLDM(const std::string &driveLetter) {
+
+    IWbemLocator *pWbemLocator = NULL;
+    IWbemServices *pWbemServices = NULL;
+
+    if (0 != wmi_init(L"ROOT\\CIMV2", &pWbemLocator, &pWbemServices)) {
+        return -1;
+    }
+
+    // Use the IWbemServices pointer to make requests of WMI.
+    // Make requests here:
+    HRESULT hres;
+    IEnumWbemClassObject* pEnumerator = NULL;
+    bool bDriveFound = false;
+    int isLDM = 0;
+
+    std::wstring wstrQuery = L"ASSOCIATORS OF {Win32_LogicalDisk.DeviceID='";
+    wstrQuery += TskHelper::toWide(driveLetter);
+    wstrQuery += L"'} where AssocClass=Win32_LogicalDiskToPartition";
+
+    // Run WMI query
+    hres = pWbemServices->ExecQuery(
+        bstr_t("WQL"),
+        bstr_t(wstrQuery.c_str()),
+        WBEM_FLAG_FORWARD_ONLY | WBEM_FLAG_RETURN_IMMEDIATELY,
+        NULL,
+        &pEnumerator);
+
+    if (FAILED(hres)) {
+        std::cerr << "WMI Query for partition type failed. "
+            << "Error code = 0x"
+            << std::hex << hres << std::endl;
+        wmi_close(&pWbemLocator, &pWbemServices);
+        return -1;
+    }
+    else {
+        IWbemClassObject *pclsObj;
+        ULONG uReturn = 0;
+        while (pEnumerator) {
+            hres = pEnumerator->Next(WBEM_INFINITE, 1, &pclsObj, &uReturn);
+            if (0 == uReturn) break;
+
+            VARIANT vtProp, vtProp2;
+
+            hres = pclsObj->Get(_bstr_t(L"Type"), 0, &vtProp, 0, 0);
+            std::wstring partitionType = vtProp.bstrVal;
+
+            hres = pclsObj->Get(_bstr_t(L"DeviceID"), 0, &vtProp2, 0, 0);
+            std::wstring deviceID = vtProp2.bstrVal;
+
+            VariantClear(&vtProp);
+            VariantClear(&vtProp2);
+
+            bDriveFound = true;
+
+            //std::wcout << L"Drive: " << TskHelper::toWide(driveLetter) << ", DeviceID:  " << deviceID << ", Type: " << partitionType << std::endl;
+            if (std::string::npos != TskHelper::toLower(TskHelper::toNarrow(partitionType)).find("logical disk manager")) {
+                //std::cerr << "Found Logical Disk Manager disk for drive = " << driveLetter << std::endl;
+                isLDM = 1;
+            }
+        }
+    }
+    pEnumerator->Release();
+
+    wmi_close(&pWbemLocator, &pWbemServices);
+
+    return bDriveFound ? isLDM : -1;
+}
+
+/*
+* Test if a drive is a BitLocker or LDM drive
+*
+* @param systemDriveLetter Drive letter, in the form of "C:"
+* @returns true if this is a BitLocker or LDM drive, false otherwise.
+*
+*/
+bool DriveUtil::hasBitLockerOrLDM(const std::string &systemDriveLetter) {
+    int checkLDMStatus = 0;
+    int checkBitlockerStatus = 0;
+
+    checkLDMStatus = DriveUtil::checkDriveForLDM(systemDriveLetter);
+    if (1 == checkLDMStatus) {
+        ReportUtil::printDebug("System drive %s is an LDM disk\n", systemDriveLetter.c_str());
+        return TRUE;
+    }
+
+    // If bitlocker protection is enabled, then analyze it
+    checkBitlockerStatus = DriveUtil::checkDriveForBitlocker(systemDriveLetter);
+    if (1 == checkBitlockerStatus) {
+        ReportUtil::printDebug("System drive %s is BitLocker encrypted\n", systemDriveLetter.c_str());
+        return TRUE;
+    }
+
+    if (0 == checkLDMStatus && 0 == checkBitlockerStatus) {
+        return false;        // neither LDM nor BitLocker detected
+    }
+    else { // an error happened in determining LDM or ProtectionStatus
+        if (-1 == checkLDMStatus) {
+            ReportUtil::printDebug("Error in checking LDM disk\n");
+        }
+        if (-1 == checkBitlockerStatus) {
+            ReportUtil::printDebug("Error in checking BitLocker protection status\n");
+        }
+
+        // Take a chance and go after PhysicalDrives, few systems have LDM or Bitlocker
+        return false;
+    }
+}
+
+/**
+* getPhysicalDrives: return a list of physical drives
+*
+* @param output a vector of physicalDrives
+* @returns true on success, or false on error
+*/
+BOOL DriveUtil::getPhysicalDrives(std::vector<std::wstring> &phyiscalDrives) {
+    char physical[60000];
+
+    /* Get list of Windows devices.  Result is a list of NULL
+    * terminated device names. */
+    if (QueryDosDeviceA(NULL, (LPSTR)physical, sizeof(physical))) {
+        phyiscalDrives.clear();
+        for (char *pos = physical; *pos; pos += strlen(pos) + 1) {
+            std::wstring str(TskHelper::toWide(pos));
+            if (str.rfind(_TSK_T("PhysicalDrive")) == 0) {
+                phyiscalDrives.push_back(str);
+                ReportUtil::printDebug("Found %s from QueryDosDeviceA", pos);
+            }
+        }
+    }
+    else {
+        ReportUtil::consoleOutput(stderr, "QueryDosDevice() return error: %d\n", GetLastError());
+        return false;
+    }
+    return true;
+}
diff --git a/tools/logicalimager/DriveUtil.h b/tools/logicalimager/DriveUtil.h
new file mode 100644
index 0000000000000000000000000000000000000000..d68c932f152d019690f524ab9ef9d64307f08569
--- /dev/null
+++ b/tools/logicalimager/DriveUtil.h
@@ -0,0 +1,34 @@
+/*
+** The Sleuth Kit
+**
+** Brian Carrier [carrier <at> sleuthkit [dot] org]
+** Copyright (c) 2010-2019 Brian Carrier.  All Rights reserved
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+#pragma once
+
+#include <string>
+
+#include "tsk/libtsk.h"
+
+/**
+* Defines the Drive Utilities
+*
+*/
+class DriveUtil {
+public:
+    static bool cwdIsFAT(std::wstring &cwd);
+    static int isDriveLocked(const std::string &driveLetter);
+    static bool hasBitLockerOrLDM(const std::string &systemDriveLetter);
+    static BOOL getPhysicalDrives(std::vector<std::wstring> &phyiscalDrives);
+
+private:
+    static bool driveIsFAT(wchar_t *drive);
+    static long wmi_init(const std::wstring& wmiNamespace, IWbemLocator **ppWbemLocator, IWbemServices **ppWbemServices);
+    static int wmi_close(IWbemLocator **ppWbemLocator, IWbemServices **ppWbemServices);
+    static int checkDriveForBitlocker(const std::string &driveLetter);
+    static int checkDriveForLDM(const std::string &driveLetter);
+};
diff --git a/tools/logicalimager/FileExtractor.cpp b/tools/logicalimager/FileExtractor.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..c9768222a023253f16b6b146012c037ffb1b0c92
--- /dev/null
+++ b/tools/logicalimager/FileExtractor.cpp
@@ -0,0 +1,221 @@
+/*
+** The Sleuth Kit
+**
+** Brian Carrier [carrier <at> sleuthkit [dot] org]
+** Copyright (c) 2010-2019 Brian Carrier.  All Rights reserved
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+/**
+* \file FileExtractor.cpp
+* Contains C++ code that implements the File Extractor class.
+*/
+
+#include <direct.h>
+#include "shlwapi.h"
+
+#include "FileExtractor.h"
+#include "ReportUtil.h"
+#include "TskHelper.h"
+
+/*
+* @param createVHD If true, extract file to VHD. If false, extract to actual file
+* @param cwd Current working directory
+* @param directoryPath Logical imager top output directory
+*/
+FileExtractor::FileExtractor(bool createVHD, const std::wstring &cwd, const std::string &directoryPath) :
+    m_createVHD(createVHD), m_cwd(cwd), m_rootDirectoryPath(directoryPath) {
+}
+
+/*
+* Initialize a directory name per image.
+* Call this method once per image at the start of analyzing a drive image.
+* This method creates a directory with the subDir name to store extracted files.
+*
+* @param imageDirName Directory name for this image
+*/
+void FileExtractor::initializePerImage(const std::string &imageDirName) {
+    m_dirCounter = 1; // reset for each image
+    m_fileCounter = 1;
+    m_imageDirName = imageDirName;
+    if (!m_createVHD) {
+        createDirectoryRecursively(TskHelper::toWide((std::string(m_rootDirectoryPath + getRootImageDirPrefix() + std::to_string(m_dirCounter)))));
+    }
+}
+
+/**
+* Extract a file. tsk_img_writer_create must have been called prior to this method.
+* Exit the program if file creation failed.
+*
+* @param fs_file File details
+* @param path Parent path of the file
+* @param [out] extractedFilePath If createVHD is false, return the extract file path
+* @returns TSK_RETVAL_ENUM TSK_OK if file is extracted, TSK_ERR otherwise.
+*/
+TSK_RETVAL_ENUM FileExtractor::extractFile(TSK_FS_FILE *fs_file, const char *path, std::string &extractedFilePath) {
+    TSK_OFF_T offset = 0;
+    size_t bufferLen = 16 * 1024;
+    char buffer[16 * 1024];
+    FILE *file = (FILE *)NULL;
+    std::string filename;
+    TSK_RETVAL_ENUM result = TSK_OK;
+
+    if (fs_file->meta == NULL) {
+        // Prevent creating an empty file, tsk_fs_file_read will fail when meta is null.
+        return TSK_ERR;
+    }
+
+    if (!m_createVHD) {
+        if (m_fileCounter > maxFilesInDir) {
+            FileExtractor::generateDirForFiles();
+            m_fileCounter = 1;
+        }
+        extractedFilePath = getRootImageDirPrefix() + std::to_string(m_dirCounter) + "/f-" + std::to_string(m_fileCounter) + (char *)PathFindExtensionA(fs_file->name->name);
+        m_fileCounter++;
+        filename = m_rootDirectoryPath + "/" + extractedFilePath;
+        file = _wfopen(TskHelper::toWide(filename).c_str(), L"wb");
+        if (file == NULL) {
+            // This can happen when the extension is invalid under Windows. Try again with no extension.
+            ReportUtil::consoleOutput(stderr, "ERROR: extractFile failed for %s, reason: %s\nTrying again with fixed file extension\n", filename.c_str(), _strerror(NULL));
+            extractedFilePath = getRootImageDirPrefix() + std::to_string(m_dirCounter) + "/f-" + std::to_string(m_fileCounter - 1);
+            filename = m_rootDirectoryPath + "/" + extractedFilePath;
+            file = _wfopen(TskHelper::toWide(filename).c_str(), L"wb");
+            if (file == NULL) {
+                ReportUtil::consoleOutput(stderr, "ERROR: extractFile failed for %s, reason: %s\n", filename.c_str(), _strerror(NULL));
+                ReportUtil::handleExit(1);
+            }
+        }
+        TskHelper::replaceAll(extractedFilePath, "/", "\\");
+    }
+
+    while (true) {
+        ssize_t bytesRead = tsk_fs_file_read(fs_file, offset, buffer, bufferLen, TSK_FS_FILE_READ_FLAG_NONE);
+        if (bytesRead == -1) {
+            if (fs_file->meta) {
+                if (fs_file->meta->size == 0) {
+                    if (fs_file->meta->addr != 0) {
+                        // tsk_fs_file_read returns -1 with empty files, don't report it.
+                        result = TSK_OK;
+                    } else {
+                        // if addr is 0, the drive maybe disconnected, extraction failed.
+                        ReportUtil::printDebug("extractFile: tsk_fs_file_read returns -1 filename=%s\toffset=%" PRIxOFF "\n", fs_file->name->name, offset);
+                        ReportUtil::consoleOutput(stderr, "ERROR: Failed to extract file, filename=%s\tpath=%s\n", fs_file->name->name, path);
+                        result = TSK_ERR;
+                    }
+                    break;
+                }
+                else if (fs_file->meta->flags & TSK_FS_NAME_FLAG_UNALLOC) {
+                    // don't report it
+                    result = TSK_ERR;
+                    break;
+                }
+                else {
+                    ReportUtil::printDebug("extractFile: tsk_fs_file_read returns -1 filename=%s\toffset=%" PRIxOFF "\n", fs_file->name->name, offset);
+                    ReportUtil::consoleOutput(stderr, "ERROR: Failed to extract file, filename=%s\tpath=%s\n", fs_file->name->name, path);
+                    result = TSK_ERR;
+                    break;
+                }
+            }
+            else { // meta is NULL
+                // don't report it
+                result = TSK_ERR;
+                break;
+            }
+        }
+        else if (bytesRead == 0) {
+            result = TSK_ERR;
+            break;
+        }
+        if (!m_createVHD && file) {
+            size_t bytesWritten = fwrite((const void *)buffer, sizeof(char), bytesRead, file);
+            if (bytesWritten != bytesRead) {
+                ReportUtil::consoleOutput(stderr, "ERROR: Failed to write file: %s reason: %s\n", filename.c_str(), _strerror(NULL));
+                result = TSK_ERR;
+                break; // don't read anymore once we have a write failure
+            }
+        }
+        offset += bytesRead;
+        if (offset >= fs_file->meta->size) {
+            break;
+        }
+    }
+
+    if (!m_createVHD && file) {
+        fclose(file);
+    }
+
+    return result;
+}
+
+/*
+* Return a string for the /root/<m_imageDirName>/d- prefix
+* @return The prefix string
+*/
+std::string FileExtractor::getRootImageDirPrefix() const {
+    return std::string("/root/" + m_imageDirName + "/d-");
+}
+
+/*
+* Create a directory to store extracted files, using an incremented directory counter.
+* The directory name has a "d-<nnn>" format where <nnn> is the directory counter.
+* Exit the program if directory creation failed.
+*
+*/
+void FileExtractor::generateDirForFiles() {
+    m_dirCounter++;
+    std::string newDir = std::string(m_rootDirectoryPath + getRootImageDirPrefix() + std::to_string(m_dirCounter));
+    if (_mkdir(newDir.c_str()) != 0) {
+        if (errno != EEXIST) {
+            ReportUtil::consoleOutput(stderr, "ERROR: mkdir failed for %s\n", newDir.c_str());
+            ReportUtil::handleExit(1);
+        }
+    }
+}
+
+/**
+* Test if directory exists.
+*
+* @param dirName directory name
+* @return bool true if directory exist, false otherwise.
+*/
+bool FileExtractor::dirExists(const std::wstring &dirName) {
+    DWORD ftyp = GetFileAttributesW(dirName.c_str());
+    if (ftyp == INVALID_FILE_ATTRIBUTES)
+        return false;  //something is wrong with your path!
+
+    if (ftyp & FILE_ATTRIBUTE_DIRECTORY)
+        return true;   // this is a directory!
+
+    return false;    // this is not a directory!
+}
+
+/**
+* Recursively create directory given by path.
+* Does not exit if the directory already exists.
+* Exit the program if a directory creation failed.
+*
+* @param path directory
+*/
+void FileExtractor::createDirectoryRecursively(const std::wstring &path) {
+    if (dirExists(path)) {
+        return;
+    }
+
+    std::wstring path2 = path;
+    TskHelper::replaceAll(path2, L"/", L"\\");
+
+    size_t pos = 0;
+    do
+    {
+        pos = path2.find_first_of(L"\\", pos + 1);
+        if (CreateDirectoryW(std::wstring(L"\\\\?\\" + m_cwd + L"\\" + path2.substr(0, pos)).c_str(), NULL) == 0) {
+            if (GetLastError() != ERROR_ALREADY_EXISTS) {
+                ReportUtil::consoleOutput(stderr, "ERROR: Fail to create directory %s Reason: %s\n", TskHelper::toNarrow(path).c_str(),
+                    ReportUtil::GetErrorStdStr(GetLastError()).c_str());
+                ReportUtil::handleExit(1);
+            }
+        }
+    } while (pos != std::string::npos);
+}
diff --git a/tools/logicalimager/FileExtractor.h b/tools/logicalimager/FileExtractor.h
new file mode 100644
index 0000000000000000000000000000000000000000..2e34433860cbc3b7be1bb2f3922023e6e52ff028
--- /dev/null
+++ b/tools/logicalimager/FileExtractor.h
@@ -0,0 +1,41 @@
+/*
+** The Sleuth Kit
+**
+** Brian Carrier [carrier <at> sleuthkit [dot] org]
+** Copyright (c) 2010-2019 Brian Carrier.  All Rights reserved
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+#pragma once
+
+#include "tsk/libtsk.h"
+
+/**
+* Defines the File Extractor
+*
+*/
+class FileExtractor {
+public:
+    FileExtractor(bool createVHD, const std::wstring &cwd, const std::string &directoryPath);
+    ~FileExtractor() {};
+    void initializePerImage(const std::string &imageDirName);
+    TSK_RETVAL_ENUM extractFile(TSK_FS_FILE *fs_file, const char *path, std::string &extractedFilePath);
+
+private:
+    FileExtractor(const FileExtractor &) = delete;
+
+    void generateDirForFiles();
+    void createDirectoryRecursively(const std::wstring &path);
+    bool dirExists(const std::wstring &dirName);
+    std::string getRootImageDirPrefix() const;
+
+    bool m_createVHD;
+    int m_dirCounter;
+    int m_fileCounter;
+    std::string m_rootDirectoryPath;
+    std::string m_imageDirName;
+    std::wstring m_cwd;
+    const int maxFilesInDir = 1000;
+};
\ No newline at end of file
diff --git a/tools/logicalimager/LogicalImagerConfiguration.cpp b/tools/logicalimager/LogicalImagerConfiguration.cpp
index 0c67d701864d1cbe4ddb8b7127d7d5bd2031bf93..0d47b0e0edba7fef8e915fe28b83b9c3d823b7b0 100644
--- a/tools/logicalimager/LogicalImagerConfiguration.cpp
+++ b/tools/logicalimager/LogicalImagerConfiguration.cpp
@@ -31,6 +31,9 @@
 *
 */
 LogicalImagerConfiguration::~LogicalImagerConfiguration() {
+    for (std::vector<LogicalImagerRuleSet *>::const_iterator iter = m_ruleSets.begin(); iter != m_ruleSets.end(); ++iter) {
+        delete *iter;
+    }
 }
 
 /*
@@ -207,12 +210,15 @@ LogicalImagerConfiguration::LogicalImagerConfiguration(const std::string &config
         if (it.key() == "rule-sets") {
             for (auto ruleSetIter = it.value().begin(); ruleSetIter != it.value().end(); ++ruleSetIter) {
                 nlohmann::json ruleSetValue = ruleSetIter.value();
-                std::vector<std::pair<const RuleMatchResult *, std::vector<LogicalImagerRuleBase *>>> rules;
+                std::vector<std::pair<const MatchedRuleInfo *, std::vector<LogicalImagerRuleBase *>>> rules;
                 LogicalImagerRuleSet *ruleSet = new LogicalImagerRuleSet();
                 ruleSet->constructRuleSet(ruleSetValue, rules);
                 m_ruleSets.push_back(ruleSet);
             }
         }
+        else if (it.key() == "create-VHD") {
+            it.value().get_to(m_createVHD);
+        }
         else if (it.key() == "finalize-image-writer") {
             it.value().get_to(m_finalizeImageWriter);
         }
@@ -258,11 +264,11 @@ TSK_RETVAL_ENUM LogicalImagerConfiguration::matches(TSK_FS_FILE *fs_file, const
 
 /**
 * Return a list of full-paths rule sets in the Logical Imager Configuration
-* @returns each element in the list consists of a RuleMatchResult and a list of full-paths.
+* @returns each element in the list consists of a MatchedRuleInfo and a list of full-paths.
 */
-const std::vector<std::pair<const RuleMatchResult *, std::list<std::string>>> LogicalImagerConfiguration::getFullFilePaths() const
+const std::vector<std::pair<const MatchedRuleInfo *, std::list<std::string>>> LogicalImagerConfiguration::getFullFilePaths() const
 {
-    std::vector<std::pair<const RuleMatchResult *, std::list<std::string>>> vector;
+    std::vector<std::pair<const MatchedRuleInfo *, std::list<std::string>>> vector;
     for (std::vector<LogicalImagerRuleSet *>::const_iterator iter = m_ruleSets.begin(); iter != m_ruleSets.end(); ++iter) {
         vector.push_back((*iter)->getFullFilePaths());
     }
diff --git a/tools/logicalimager/LogicalImagerConfiguration.h b/tools/logicalimager/LogicalImagerConfiguration.h
index 4c25cd41f5f7a15aa256d81c9fee472cd013c86a..3c3f130e6a21adef880246b8daa9f94e69385a82 100644
--- a/tools/logicalimager/LogicalImagerConfiguration.h
+++ b/tools/logicalimager/LogicalImagerConfiguration.h
@@ -22,7 +22,7 @@
 
 #include "tsk/tsk_tools_i.h"
 #include "LogicalImagerRuleSet.h"
-#include "RuleMatchResult.h"
+#include "MatchedRuleInfo.h"
 #include "json.h"
 
 /**
@@ -36,7 +36,8 @@ class LogicalImagerConfiguration
     ~LogicalImagerConfiguration();
 
     TSK_RETVAL_ENUM matches(TSK_FS_FILE *fs_file, const char *path) const;
-    const std::vector<std::pair<const RuleMatchResult *, std::list<std::string>>> getFullFilePaths() const;
+    const std::vector<std::pair<const MatchedRuleInfo *, std::list<std::string>>> getFullFilePaths() const;
+    bool getCreateVHD() { return m_createVHD; }
     bool getFinalizeImagerWriter() { return m_finalizeImageWriter; }
     bool getPromptBeforeExit() { return m_promptBeforeExit; }
     std::string getVersion() { return m_version; }
@@ -45,6 +46,7 @@ class LogicalImagerConfiguration
     LogicalImagerConfiguration(const LogicalImagerConfiguration &) = delete;
 
     std::vector<LogicalImagerRuleSet *> m_ruleSets;
+    bool m_createVHD = false;
     bool m_finalizeImageWriter = false;
     bool m_promptBeforeExit = true;
     std::string m_version;
diff --git a/tools/logicalimager/LogicalImagerExtensionRule.cpp b/tools/logicalimager/LogicalImagerExtensionRule.cpp
index b5ff191ff909cf3cb078823aa105808362ebe88c..6e7dccb40c78b73eb9d515cad868a079261e88a8 100755
--- a/tools/logicalimager/LogicalImagerExtensionRule.cpp
+++ b/tools/logicalimager/LogicalImagerExtensionRule.cpp
@@ -29,6 +29,7 @@ LogicalImagerExtensionRule::LogicalImagerExtensionRule(const std::set<std::strin
 }
 
 LogicalImagerExtensionRule::~LogicalImagerExtensionRule() {
+    m_extensions.clear();
 }
 
 /**
diff --git a/tools/logicalimager/LogicalImagerFilenameRule.cpp b/tools/logicalimager/LogicalImagerFilenameRule.cpp
index e3a128c8ea4f4dfc2103ade534239aa8f2e3dd8f..a3d17590e0520910fdfad25c89a76cf641443a01 100755
--- a/tools/logicalimager/LogicalImagerFilenameRule.cpp
+++ b/tools/logicalimager/LogicalImagerFilenameRule.cpp
@@ -28,6 +28,7 @@ LogicalImagerFilenameRule::LogicalImagerFilenameRule(const std::set<std::string>
 }
 
 LogicalImagerFilenameRule::~LogicalImagerFilenameRule() {
+    m_filenames.clear();
 }
 
 /**
diff --git a/tools/logicalimager/LogicalImagerPathRule.cpp b/tools/logicalimager/LogicalImagerPathRule.cpp
index 0a138ddc47bcac7bbbcefbe1d0a2952f4f8cab08..1cc91093c36894e8e7d0475bdc9fa054758de8e2 100755
--- a/tools/logicalimager/LogicalImagerPathRule.cpp
+++ b/tools/logicalimager/LogicalImagerPathRule.cpp
@@ -19,11 +19,6 @@
 static char *userFolderRegex = "/?(documents and settings|users|home)/[^/]+";
 static std::string lowerCaseUserFolder;
 
-bool endsWith(const std::string &str, const std::string &suffix) {
-    return str.size() >= suffix.size() &&
-        str.compare(str.size() - suffix.size(), suffix.size(), suffix) == 0;
-}
-
 /*
 * Construct a path rule.
 *
@@ -47,7 +42,7 @@ LogicalImagerPathRule::LogicalImagerPathRule(const std::set<std::string> &paths)
             // special case, add to regex
             std::string newPattern(lowerCasePath);
             newPattern.replace(newPattern.find(lowerCaseUserFolder), lowerCaseUserFolder.length(), userFolderRegex);
-            if (endsWith(lowerCasePath, "/")) {
+            if (TskHelper::endsWith(lowerCasePath, "/")) {
                 newPattern.append(".*");
             } else {
                 newPattern.append("/.*");
@@ -61,6 +56,8 @@ LogicalImagerPathRule::LogicalImagerPathRule(const std::set<std::string> &paths)
 }
 
 LogicalImagerPathRule::~LogicalImagerPathRule() {
+    m_paths.clear();
+    m_userFolderRegexes.clear();
 }
 
 /**
diff --git a/tools/logicalimager/LogicalImagerRuleSet.cpp b/tools/logicalimager/LogicalImagerRuleSet.cpp
index 433127504d070b2518efdc2b270875a1f4440f70..3086d6e89363e289a8c435569802705c35507ff4 100644
--- a/tools/logicalimager/LogicalImagerRuleSet.cpp
+++ b/tools/logicalimager/LogicalImagerRuleSet.cpp
@@ -217,13 +217,13 @@ void LogicalImagerRuleSet::constructRule(const std::string &ruleSetName, nlohman
         throw std::logic_error("ERROR: a rule with full-paths cannot have other rule definitions");
     }
 
-    RuleMatchResult *ruleMatchKey = new RuleMatchResult(ruleSetName, name, description, shouldSave, shouldAlert);
+    MatchedRuleInfo *ruleMatchKey = new MatchedRuleInfo(ruleSetName, name, description, shouldSave, shouldAlert);
     if (!fullPaths.empty()) {
         m_fullFilePaths.first = ruleMatchKey;
         m_fullFilePaths.second = fullPaths;
     }
     else if (!vector.empty()) {
-        m_rules.push_back(std::pair<const RuleMatchResult *, std::vector<LogicalImagerRuleBase *>>(ruleMatchKey, vector));
+        m_rules.push_back(std::pair<const MatchedRuleInfo *, std::vector<LogicalImagerRuleBase *>>(ruleMatchKey, vector));
     }
 }
 
@@ -235,7 +235,7 @@ void LogicalImagerRuleSet::constructRule(const std::string &ruleSetName, nlohman
 * @throws std::logic_error on any error
 */
 void LogicalImagerRuleSet::constructRuleSet(const nlohmann::json ruleSet,
-    std::vector<std::pair<const RuleMatchResult *, std::vector<LogicalImagerRuleBase *>>> &outRules
+    std::vector<std::pair<const MatchedRuleInfo *, std::vector<LogicalImagerRuleBase *>>> &outRules
 ) {
     std::vector<LogicalImagerRuleBase *> vector;
     std::list<std::string> fullPaths;
@@ -265,6 +265,13 @@ LogicalImagerRuleSet::~LogicalImagerRuleSet() {
     for (auto it = m_rules.begin(); it != m_rules.end(); ++it) {
         if (it->first)
             delete it->first;
+        for (auto ruleBaseIter = it->second.begin(); ruleBaseIter != it->second.end(); ++ruleBaseIter) {
+            delete *ruleBaseIter;
+        }
+    }
+    if (m_fullFilePaths.first) {
+        delete m_fullFilePaths.first;
+        m_fullFilePaths.second.clear();
     }
 }
 
@@ -279,8 +286,8 @@ LogicalImagerRuleSet::~LogicalImagerRuleSet() {
  */
 bool LogicalImagerRuleSet::matches(TSK_FS_FILE *fs_file, const char *path, matchCallback callbackFunc) const {
     bool result = true;
-    for (std::vector<std::pair<const RuleMatchResult *, std::vector<LogicalImagerRuleBase *>>>::const_iterator it = m_rules.begin(); it != m_rules.end(); ++it) {
-        const std::pair<const RuleMatchResult *, std::vector<LogicalImagerRuleBase *>> tuple = *it;
+    for (std::vector<std::pair<const MatchedRuleInfo *, std::vector<LogicalImagerRuleBase *>>>::const_iterator it = m_rules.begin(); it != m_rules.end(); ++it) {
+        const std::pair<const MatchedRuleInfo *, std::vector<LogicalImagerRuleBase *>> tuple = *it;
         std::vector<LogicalImagerRuleBase *> rules = tuple.second;
         bool result = true;
         // All rules in this set must match (ANDed)
@@ -304,6 +311,6 @@ bool LogicalImagerRuleSet::matches(TSK_FS_FILE *fs_file, const char *path, match
 * 
 * @returns the full file paths rule set
 */
-const std::pair<const RuleMatchResult *, std::list<std::string>> LogicalImagerRuleSet::getFullFilePaths() const {
+const std::pair<const MatchedRuleInfo *, std::list<std::string>> LogicalImagerRuleSet::getFullFilePaths() const {
     return m_fullFilePaths;
 }
diff --git a/tools/logicalimager/LogicalImagerRuleSet.h b/tools/logicalimager/LogicalImagerRuleSet.h
index 32e612732109dc6f694e5a5da46eccea947779d8..bf7c7eebd72f9854ee598ad0ac6796e26d90b4ba 100644
--- a/tools/logicalimager/LogicalImagerRuleSet.h
+++ b/tools/logicalimager/LogicalImagerRuleSet.h
@@ -22,7 +22,7 @@
 
 #include "tsk/tsk_tools_i.h"
 #include "LogicalImagerRuleBase.h"
-#include "RuleMatchResult.h"
+#include "MatchedRuleInfo.h"
 #include "json.h"
 
 /**
@@ -32,20 +32,20 @@
 class LogicalImagerRuleSet
 {
 public:
-    typedef TSK_RETVAL_ENUM(*matchCallback)(const RuleMatchResult *, TSK_FS_FILE *, const char *);
+    typedef TSK_RETVAL_ENUM(*matchCallback)(const MatchedRuleInfo *, TSK_FS_FILE *, const char *);
 
     LogicalImagerRuleSet();
     ~LogicalImagerRuleSet();
 
     bool matches(TSK_FS_FILE *fs_file, const char *path, matchCallback callbackFunc) const;
-    const std::pair<const RuleMatchResult *, std::list<std::string>> getFullFilePaths() const;
+    const std::pair<const MatchedRuleInfo *, std::list<std::string>> getFullFilePaths() const;
 
-    const std::vector<std::pair<const RuleMatchResult *, std::vector<LogicalImagerRuleBase *>>> getRules() {
+    const std::vector<std::pair<const MatchedRuleInfo *, std::vector<LogicalImagerRuleBase *>>> getRules() {
         return m_rules;
     }
 
     void constructRuleSet(const nlohmann::json ruleSet, 
-        std::vector<std::pair<const RuleMatchResult *, std::vector<LogicalImagerRuleBase *>>> &ourRules
+        std::vector<std::pair<const MatchedRuleInfo *, std::vector<LogicalImagerRuleBase *>>> &ourRules
     );
 
 private:
@@ -53,6 +53,6 @@ class LogicalImagerRuleSet
     void constructRule(const std::string &ruleSetName, nlohmann::json rule);
 
     std::string m_ruleSetName;
-    std::vector<std::pair<const RuleMatchResult *, std::vector<LogicalImagerRuleBase *>>> m_rules;
-    std::pair<const RuleMatchResult *, std::list<std::string>> m_fullFilePaths;
+    std::vector<std::pair<const MatchedRuleInfo *, std::vector<LogicalImagerRuleBase *>>> m_rules;
+    std::pair<const MatchedRuleInfo *, std::list<std::string>> m_fullFilePaths;
 };
diff --git a/tools/logicalimager/RuleMatchResult.cpp b/tools/logicalimager/MatchedRuleInfo.cpp
old mode 100755
new mode 100644
similarity index 71%
rename from tools/logicalimager/RuleMatchResult.cpp
rename to tools/logicalimager/MatchedRuleInfo.cpp
index 5daf8d7ef1c5d990e4e22181ad20b0fe21963ee8..84e549792d30dc684ed5d3642078595e73a9388a
--- a/tools/logicalimager/RuleMatchResult.cpp
+++ b/tools/logicalimager/MatchedRuleInfo.cpp
@@ -9,13 +9,13 @@
 */
 
 /**
-* \file RuleMatchResult.cpp
-* Contains C++ code that implement the Rule Match Resultt class.
+* \file MatchedRuleInfo.cpp
+* Contains C++ code that implements the Matched Rule Info class.
 */
 
-#include "RuleMatchResult.h"
+#include "MatchedRuleInfo.h"
 
-RuleMatchResult::RuleMatchResult(const std::string &ruleSetName, const std::string &name, 
+MatchedRuleInfo::MatchedRuleInfo(const std::string &ruleSetName, const std::string &name, 
     const std::string &description, bool shouldSave, bool shouldAlert) :
     m_ruleSetName(ruleSetName),
     m_name(name),
diff --git a/tools/logicalimager/RuleMatchResult.h b/tools/logicalimager/MatchedRuleInfo.h
old mode 100755
new mode 100644
similarity index 77%
rename from tools/logicalimager/RuleMatchResult.h
rename to tools/logicalimager/MatchedRuleInfo.h
index 3919e0f571acfd5f9540eb72114c283946a309ce..0463d924b3f01d114ffe746e6526374114a18dea
--- a/tools/logicalimager/RuleMatchResult.h
+++ b/tools/logicalimager/MatchedRuleInfo.h
@@ -9,8 +9,8 @@
 */
 
 /**
-* \file RuleMatchResult.h
-* Contains the class definitions for the Rule Match Result.
+* \file MatchedRuleInfo.h
+* Contains the class definitions for the Matched Rule Info.
 */
 
 #pragma once
@@ -18,15 +18,15 @@
 #include <string>
 
 /**
-* Defines the rule match result
+* Defines the matched rule information
 *
 */
-class RuleMatchResult
+class MatchedRuleInfo
 {
 public:
-    RuleMatchResult(const std::string &ruleSetName, const std::string &name, 
+    MatchedRuleInfo(const std::string &ruleSetName, const std::string &name, 
         const std::string &description, bool shouldSave = true, bool shouldAlert = false);
-    ~RuleMatchResult() {}
+    ~MatchedRuleInfo() {}
 
     const std::string getRuleSetName() const { return m_ruleSetName; }
     const std::string getName() const { return m_name; }
diff --git a/tools/logicalimager/RegFileInfo.h b/tools/logicalimager/RegFileInfo.h
index e14add92df8f8ff692721ada6214476e920de22a..2c16fabb69de110556dc25fd9886c85792d73a22 100755
--- a/tools/logicalimager/RegFileInfo.h
+++ b/tools/logicalimager/RegFileInfo.h
@@ -24,7 +24,7 @@
 class RegFileInfo {
 public:
     RegFileInfo(std::string &aName, std::string &aPath, RegHiveType::Enum a_hiveType, TSK_OFF_T aOff, TSK_INUM_T aMetaAddr, RegParser *a_regParser);
-    ~RegFileInfo(void);
+    ~RegFileInfo();
 
     static RegHiveType::Enum hiveNameToType(const std::string &aName);
 
diff --git a/tools/logicalimager/RegKey.cpp b/tools/logicalimager/RegKey.cpp
index 044c08c915ecd1bfcacf8a7fcc390416a097d9de..35c47460c2053fca68f702557d6b8adbf9b4fb69 100755
--- a/tools/logicalimager/RegKey.cpp
+++ b/tools/logicalimager/RegKey.cpp
@@ -49,6 +49,9 @@ int RegKey::initialize(const Rejistry::RegistryKey *regKey) {
     }
 
     m_keyName = regKey->getName();
+    // TODO - replace the following 2 lines when these methods are available in PR #1665
+    // m_numSubkeys = regKey->getSubkeyListSize();
+    // m_numValues = regKey->getValueListSize();
     m_numSubkeys = regKey->getSubkeyList().size();
     m_numValues = regKey->getValueList().size();
     uint64_t timestamp = regKey->getTimestamp();
diff --git a/tools/logicalimager/RegParser.cpp b/tools/logicalimager/RegParser.cpp
index a7c728cacc979d57fda39ff829b877de3f1e1f0f..f6d22528962e62540057779693400e70cece5ef1 100755
--- a/tools/logicalimager/RegParser.cpp
+++ b/tools/logicalimager/RegParser.cpp
@@ -11,6 +11,7 @@
 #include <iostream>
 
 #include "RegParser.h"
+#include "ReportUtil.h"
 
 RegParser::RegParser(const RegHiveType::Enum aHiveType)
     : m_registryHive(NULL), m_rootKey(NULL) {
@@ -42,7 +43,7 @@ RegParser::~RegParser() {
  */
 int RegParser::loadHive(TSK_FS_FILE *aHiveFile, RegHiveType::Enum aHiveType) {
     if (aHiveFile == NULL) {
-        std::cerr << "Null pointer passed to RegParser::loadHive. loadHive() failed." << std::endl;
+        ReportUtil::consoleOutput(stderr, "Null pointer passed to RegParser::loadHive. loadHive() failed.\n");
         return -1;
     }
 
@@ -55,14 +56,14 @@ int RegParser::loadHive(TSK_FS_FILE *aHiveFile, RegHiveType::Enum aHiveType) {
     // Read the contents of the TSK_FS_FILE into memory.
     uint8_t *registryBuffer;
     if ((registryBuffer = (uint8_t *)malloc((size_t)aHiveFile->meta->size)) == NULL) {
-        std::cerr << "loadHive(): Error allocating memory for hive file. tsk_fs_file_read() failed." << std::endl;
+        ReportUtil::consoleOutput(stderr, "loadHive(): Error allocating memory for hive file. tsk_fs_file_read() failed.\n");
         return -1;
     }
 
     ssize_t bytesRead = tsk_fs_file_read(aHiveFile, 0, (char *)&registryBuffer[0],
         (size_t)aHiveFile->meta->size, TSK_FS_FILE_READ_FLAG_NONE);
     if (bytesRead != aHiveFile->meta->size) {
-        std::cerr << "loadHive(): Error reading content from hive file. tsk_fs_file_read() failed." << std::endl;
+        ReportUtil::consoleOutput(stderr, "loadHive(): Error reading content from hive file. tsk_fs_file_read() failed.\n");
         free(registryBuffer);
         return -1;
     }
@@ -71,12 +72,12 @@ int RegParser::loadHive(TSK_FS_FILE *aHiveFile, RegHiveType::Enum aHiveType) {
         m_registryHive = new Rejistry::RegistryHiveBuffer(registryBuffer, (uint32_t)aHiveFile->meta->size);
     }
     catch (Rejistry::RegistryParseException &) {
-        std::cerr << "loadHive(): Error creating RegistryHiveBuffer.  Likely because of memory size." << std::endl;
+        ReportUtil::consoleOutput(stderr, "loadHive(): Error creating RegistryHiveBuffer.  Likely because of memory size.\n");
         free(registryBuffer);
         return -1;
     }
     catch (...) {
-        std::cerr << "loadHive(): Error creating RegistryHiveBuffer (general exception).  Likely because of memory size." << std::endl;
+        ReportUtil::consoleOutput(stderr, "loadHive(): Error creating RegistryHiveBuffer (general exception).  Likely because of memory size.\n");
         free(registryBuffer);
         return -1;
     }
@@ -131,6 +132,10 @@ int RegParser::getKey(const std::wstring &keyName, RegKey &aKey) {
     }
 
     aKey.initialize(key);
+
+    if (key != NULL) {
+        delete key;
+    }
     return 0;
 }
 
@@ -238,6 +243,7 @@ int RegParser::getValue(const std::wstring &keyName, const std::wstring &valName
         std::auto_ptr<Rejistry::RegistryKey const> key(findKey(keyName));
         Rejistry::RegistryValue *value = key->getValue(valName);
         val.initialize(value);
+        delete value;
     }
     catch (Rejistry::NoSuchElementException&) {
         return -1;
@@ -278,6 +284,7 @@ int RegParser::getValue(const RegKey *startKey, const std::wstring &subpathName,
         std::auto_ptr<Rejistry::RegistryKey const> key(findKey(subpathName, startKey->getRegistryKey()));
         Rejistry::RegistryValue *value = key->getValue(valName);
         val.initialize(value);
+        delete value;
     }
     catch (Rejistry::NoSuchElementException&) {
         return -1;
diff --git a/tools/logicalimager/RegVal.cpp b/tools/logicalimager/RegVal.cpp
index 57d4237419af9f54499c18280e04e269a7dd4e66..60bb7774697c444d2eb3ec1248da4816f5a6f8b1 100755
--- a/tools/logicalimager/RegVal.cpp
+++ b/tools/logicalimager/RegVal.cpp
@@ -14,6 +14,7 @@
 #include <iomanip>
 
 #include "RegVal.h"
+#include "ReportUtil.h"
 
 std::string ValTypStrArr[] = {
   "REG_NONE",                    // ( 0 )   // No value type
@@ -109,6 +110,12 @@ RegVal::RegVal(const Rejistry::RegistryValue *value) {
     initialize(value);
 }
 
+RegVal::~RegVal() {
+    if (m_registryValue) {
+        delete m_registryValue;
+    }
+}
+
 /*
 * Initialize a RegVal object
 * 
@@ -157,10 +164,11 @@ int RegVal::initialize(const Rejistry::RegistryValue *value) {
             // This shouldn't happen because we check the range above.
             break;
         }
+        delete valueData;
     }
     catch (Rejistry::RegistryParseException& e)
     {
-        std::cerr << "Failed to initialize registry value due to registry parse exception: " << e.message() << std::endl;
+        ReportUtil::consoleOutput(stderr, "Failed to initialize registry value due to registry parse exception: %s\n", e.message().c_str());
         return -1;
     }
     return 0;
diff --git a/tools/logicalimager/RegVal.h b/tools/logicalimager/RegVal.h
index 525301e416b87c45cc90b7a1ba07c7fda2ecb01c..94606780b86aee3304cbcd5d99868e74ee1f5f14 100755
--- a/tools/logicalimager/RegVal.h
+++ b/tools/logicalimager/RegVal.h
@@ -31,6 +31,8 @@ class RegVal
     RegVal(std::wstring &valName, int valType, long valLen, unsigned char *binData); // bin data
     RegVal(const Rejistry::RegistryValue *value);
 
+    ~RegVal();
+
     int initialize(const Rejistry::RegistryValue *value);
 
     void setValName(std::wstring &valName) { m_valName = valName; }
diff --git a/tools/logicalimager/RegistryAnalyzer.cpp b/tools/logicalimager/RegistryAnalyzer.cpp
index 0911d537ebcf234ebf4ae3c53fa0bfc9f2cefe72..2c74a2ca1a6fdff029ab2c02e79ddfd407ae21d0 100755
--- a/tools/logicalimager/RegistryAnalyzer.cpp
+++ b/tools/logicalimager/RegistryAnalyzer.cpp
@@ -2,7 +2,7 @@
 ** The Sleuth Kit
 **
 ** Brian Carrier [carrier <at> sleuthkit [dot] org]
-** Copyright (c) 2010-2019 Brian Carrier.  All Rights reserved
+** Copyright (c) 2019 Basis Technology.  All Rights reserved
 **
 ** This software is distributed under the Common Public License 1.0
 **
@@ -24,7 +24,7 @@
 #include "RegistryLoader.h"
 #include "UserAccount.h"
 
-extern void consoleOutput(FILE *fd, const char *msg, ...);
+#include "ReportUtil.h"
 
 const std::string LOCAL_DOMAIN = "local";
 
@@ -38,7 +38,7 @@ RegistryAnalyzer::RegistryAnalyzer(const std::string &outputFilePath) :
 {
     m_outputFile = fopen(m_outputFilePath.c_str(), "w");
     if (!m_outputFile) {
-        consoleOutput(stdout, "ERROR: Failed to open file %s\n", m_outputFilePath.c_str());
+        ReportUtil::consoleOutput(stdout, "ERROR: Failed to open file %s\n", m_outputFilePath.c_str());
         exit(1);
     }
 
@@ -56,6 +56,7 @@ RegistryAnalyzer::RegistryAnalyzer(const std::string &outputFilePath) :
 RegistryAnalyzer::~RegistryAnalyzer() {
     if (m_outputFile) {
         fclose(m_outputFile);
+        m_outputFile = NULL;
     }
 }
 
@@ -191,10 +192,13 @@ USER_ADMIN_PRIV::Enum samUserTypeToAdminPriv(uint32_t& acctType) {
 
 int RegistryAnalyzer::analyzeSAMUsers() const {
     std::map<std::wstring, FILETIME> acctCreationDateMap;
-    RegFileInfo *aRegFile = RegistryLoader::getInstance().getSAMHive();
+    RegistryLoader *registryLoader = new RegistryLoader();
+
+    RegFileInfo *aRegFile = registryLoader->getSAMHive();
     if (aRegFile == NULL) {
         fprintf(m_outputFile, "SAM HIVE not found\n");
         fclose(m_outputFile);
+        delete registryLoader;
         return -1;
     }
     RegParser &aRegParser = aRegFile->getRegParser();
@@ -221,8 +225,8 @@ int RegistryAnalyzer::analyzeSAMUsers() const {
         }
         else if (-2 == rc) {
             std::string errMsg = "analyzeSAMUsers: Error getting key  = " + TskHelper::toNarrow(wsSAMUserNamesKeyName) + 
-                " Local user accounts may not be reported.";
-            std::cerr << errMsg << std::endl;
+                " Local user accounts may not be reported.\n";
+            ReportUtil::consoleOutput(stderr, errMsg.c_str());
             rc = -1;
         }
 
@@ -246,12 +250,12 @@ int RegistryAnalyzer::analyzeSAMUsers() const {
                     USER_ADMIN_PRIV::Enum acctAdminPriv;
 
                     // Get V Record
-                    RegVal vRecord;
+                    RegVal *vRecord = new RegVal();
                     std::wstring wsVRecordValname = L"V";
-                    vRecord.setValName(wsVRecordValname);
-                    if (0 == aRegParser.getValue(wsSAMRIDKeyName, wsVRecordValname, vRecord)) {
+                    vRecord->setValName(wsVRecordValname);
+                    if (0 == aRegParser.getValue(wsSAMRIDKeyName, wsVRecordValname, *vRecord)) {
                         uint32_t samAcctType = 0;
-                        if (parseSAMVRecord(vRecord.getBinary(), vRecord.getValLen(), wsUserName, wsFullName, wsComment, samAcctType)) {
+                        if (parseSAMVRecord(vRecord->getBinary(), vRecord->getValLen(), wsUserName, wsFullName, wsComment, samAcctType)) {
                             bError = true;
                         }
                         else {
@@ -264,6 +268,8 @@ int RegistryAnalyzer::analyzeSAMUsers() const {
                         bError = true;
                     }
 
+                    delete vRecord;
+
                     FILETIME lastLoginDate = { 0,0 };
                     FILETIME lastPWResetDate = { 0,0 };
                     FILETIME accountExpiryDate = { 0,0 };
@@ -278,14 +284,14 @@ int RegistryAnalyzer::analyzeSAMUsers() const {
                     bool accountDisabled = false;
 
                     // GET F Record
-                    RegVal fRecord;
+                    RegVal *fRecord = new RegVal();
                     std::wstring wsFRecordValname = L"F";
-                    fRecord.setValName(wsFRecordValname);
+                    fRecord->setValName(wsFRecordValname);
 
-                    if (0 == aRegParser.getValue(wsSAMRIDKeyName, wsFRecordValname, fRecord)) {
+                    if (0 == aRegParser.getValue(wsSAMRIDKeyName, wsFRecordValname, *fRecord)) {
                         uint16_t acbFlags = 0;
                         // Parse F Record
-                        parseSAMFRecord(fRecord.getBinary(), fRecord.getValLen(), lastLoginDate, lastPWResetDate, 
+                        parseSAMFRecord(fRecord->getBinary(), fRecord->getValLen(), lastLoginDate, lastPWResetDate,
                             accountExpiryDate, lastFailedLoginDate, loginCount, acbFlags);
 
                         sLastLoginDate = FiletimeToStr(lastLoginDate);
@@ -297,13 +303,16 @@ int RegistryAnalyzer::analyzeSAMUsers() const {
                             sDateCreated = FiletimeToStr(it->second);
                         }
                         else {
-                            std::wcerr << "User name = " << wsUserName << " not found in acctCreationDateMap" << std::endl;
+                            std::string msg = TskHelper::toNarrow(L"User name = " + wsUserName + L" not found in acctCreationDateMap\n");
+                            ReportUtil::consoleOutput(stderr, msg.c_str());
                         }
 
                         if ((acbFlags & 0x0001) == 0x0001)
                             accountDisabled = true;
                     }
 
+                    delete fRecord;
+
                     if (!bError) {
 
                         // SAM is parsed first and has only local accounts. We assume none of these users already exist.
@@ -343,8 +352,8 @@ int RegistryAnalyzer::analyzeSAMUsers() const {
         else {
             std::string errMsg = "analyzeSAMUsers: Error getting key  = "
                 + TskHelper::toNarrow(wsSAMUsersKeyName)
-                + " Local user accounts may not be reported.";
-            std::cerr << errMsg << std::endl;
+                + " Local user accounts may not be reported.\n";
+            ReportUtil::consoleOutput(stderr, errMsg.c_str());
             rc = -1;
         }
     }
@@ -354,13 +363,14 @@ int RegistryAnalyzer::analyzeSAMUsers() const {
             std::rethrow_exception(eptr);
         }
         catch (const std::exception& e) {
-            std::string errMsg = "RegisteryAnalyzer: Uncaught exception in analyzeSAMUsers.";
-            std::cerr << errMsg << std::endl;
-            std::cerr << e.what() << std::endl;
+            std::string errMsg = "RegistryAnalyzer: Uncaught exception in analyzeSAMUsers.\n";
+            ReportUtil::consoleOutput(stderr, errMsg.c_str());
+            ReportUtil::consoleOutput(stderr, e.what());
         }
         rc = -1;
     }
     fclose(m_outputFile);
+    delete registryLoader;
     return rc;
 }
 
@@ -420,7 +430,7 @@ int RegistryAnalyzer::parseSAMVRecord(const unsigned char *pVRec, size_t aVRecLe
     comment = L"";
 
     if (aVRecLen < 44) {
-        std::cerr << "ERROR: SAMV record too short" << std::endl;
+        ReportUtil::consoleOutput(stderr, "ERROR: SAMV record too short\n");
         return -1;
     }
 
@@ -432,7 +442,7 @@ int RegistryAnalyzer::parseSAMVRecord(const unsigned char *pVRec, size_t aVRecLe
     len = makeDWORD(&pVRec[16]);
 
     if ((off >= aVRecLen) || (off + len > aVRecLen)) {
-        std::cerr << "ERROR: SAMV record too short" << std::endl;
+        ReportUtil::consoleOutput(stderr, "ERROR: SAMV record too short\n");
         return -1;
     }
     userName = utf16LEToWString(&pVRec[off], len);
@@ -442,7 +452,7 @@ int RegistryAnalyzer::parseSAMVRecord(const unsigned char *pVRec, size_t aVRecLe
     len = makeDWORD(&pVRec[28]);
     if (len > 0) {
         if (off + len > aVRecLen) {
-            std::cerr << "ERROR: SAMV record too short" << std::endl;
+            ReportUtil::consoleOutput(stderr, "ERROR: SAMV record too short\n");
             return -1;
         }
         userFullName = utf16LEToWString(&pVRec[off], len);
@@ -453,7 +463,7 @@ int RegistryAnalyzer::parseSAMVRecord(const unsigned char *pVRec, size_t aVRecLe
     len = makeDWORD(&pVRec[40]);
     if (len > 0) {
         if (off + len > aVRecLen) {
-            std::cerr << "ERROR: SAMV record too short" << std::endl;
+            ReportUtil::consoleOutput(stderr, "ERROR: SAMV record too short\n");
             return -1;
         }
         comment = utf16LEToWString(&pVRec[off], len);
@@ -487,7 +497,7 @@ int RegistryAnalyzer::parseSAMFRecord(const unsigned char *pFRec, long aFRecLen,
     FILETIME tv;
 
     if (aFRecLen < 68) {
-        std::cerr << "ERROR: SAMF record too short" << std::endl;
+        ReportUtil::consoleOutput(stderr, "ERROR: SAMF record too short\n");
         return -1;
     }
 
diff --git a/tools/logicalimager/RegistryAnalyzer.h b/tools/logicalimager/RegistryAnalyzer.h
index 9e2029db051b852fb89b25dea934bf5ecf74309a..74c699d5bfd4f522fbabea64c0e36489590189fd 100755
--- a/tools/logicalimager/RegistryAnalyzer.h
+++ b/tools/logicalimager/RegistryAnalyzer.h
@@ -25,7 +25,6 @@ class RegistryAnalyzer {
 public:
     RegistryAnalyzer(const std::string &outputFilePath);
     ~RegistryAnalyzer();
-
     int analyzeSAMUsers() const;
 
 private:
diff --git a/tools/logicalimager/RegistryLoader.cpp b/tools/logicalimager/RegistryLoader.cpp
index 98f65f843cf3d686b1b39f56f4952841d2920735..61641326ac0ddd027d0ae37e0eb927a68d54b867 100755
--- a/tools/logicalimager/RegistryLoader.cpp
+++ b/tools/logicalimager/RegistryLoader.cpp
@@ -2,7 +2,7 @@
 ** The Sleuth Kit
 **
 ** Brian Carrier [carrier <at> sleuthkit [dot] org]
-** Copyright (c) 2010-2019 Brian Carrier.  All Rights reserved
+** Copyright (c) 2019 Basis Technology.  All Rights reserved
 **
 ** This software is distributed under the Common Public License 1.0
 **
@@ -13,6 +13,7 @@
 
 #include "RegistryLoader.h"
 #include "TskHelper.h"
+#include "ReportUtil.h"
 
 /** Responsible for loading and caching registry hives for the various modules that will need it. */
 
@@ -248,11 +249,10 @@ int RegistryLoader::findSystemRegFiles(TSK_FS_INFO *a_fs_info) {
     const std::string SYS_REG_FILES_DIR = "/Windows/System32/config";
 
     TSKFileNameInfo filenameInfo;
-    TSK_FS_FILE *fsFile;
-    int8_t retval = TskHelper::getInstance().path2Inum(a_fs_info, SYS_REG_FILES_DIR.c_str(), false, filenameInfo, NULL, &fsFile);
+    int8_t retval = TskHelper::getInstance().path2Inum(a_fs_info, SYS_REG_FILES_DIR.c_str(), false, filenameInfo, NULL, NULL);
     if (retval == -1) {
-        std::cerr << "Error in finding system Registry files. System Registry files will not be analyzed." << std::endl;
-        std::cerr << "findSystemRegFiles(): path2inum() failed for dir = " << SYS_REG_FILES_DIR << ", errno = " << tsk_error_get() << std::endl;
+        ReportUtil::consoleOutput(stderr, "Error in finding system Registry files. System Registry files will not be analyzed.\n");
+        ReportUtil::consoleOutput(stderr, "findSystemRegFiles(): path2inum() failed for dir = %s, errno = %s\n", SYS_REG_FILES_DIR.c_str(), tsk_error_get());
         return -1;
     }
     else if (retval > 0) { // not found   // @@@ ACTUALLY CHECK IF IT IS #2
@@ -262,9 +262,9 @@ int RegistryLoader::findSystemRegFiles(TSK_FS_INFO *a_fs_info) {
     // open the directory
     TSK_FS_DIR *fs_dir;
     if ((fs_dir = tsk_fs_dir_open_meta(a_fs_info, filenameInfo.getINUM())) == NULL) {
-        std::cerr << "Error opening windows/system32/config folder. Some System Registry files may not be analyzed.";
-        std::cerr << "findSystemRegFiles(): tsk_fs_dir_open_meta() failed for windows/system32/config folder.  dir inum = " << 
-            filenameInfo.getINUM() << ", errno = " << tsk_error_get() << std::endl;
+        ReportUtil::consoleOutput(stderr, "Error opening windows/system32/config folder. Some System Registry files may not be analyzed.\n");
+        ReportUtil::consoleOutput(stderr, "findSystemRegFiles(): tsk_fs_dir_open_meta() failed for windows/system32/config folder.  dir inum = %" PRIuINUM ", errno = %s\n",
+            filenameInfo.getINUM(), tsk_error_get());
         return -1;
     }
 
@@ -276,9 +276,9 @@ int RegistryLoader::findSystemRegFiles(TSK_FS_INFO *a_fs_info) {
         // get the entry
         const TSK_FS_NAME *fs_name;
         if ((fs_name = tsk_fs_dir_get_name(fs_dir, i)) == NULL) {
-            std::cerr << "Error in finding System Registry files. Some System Registry files may not be analyzed." << std::endl;
-            std::cerr << "findSystemRegFiles(): Error getting directory entry = " << i << " in dir inum = " << filenameInfo.getINUM() << 
-                ", errno = " << tsk_error_get() << ", some System Registry files may not be analyzed." << std::endl;
+            ReportUtil::consoleOutput(stderr, "Error in finding System Registry files. Some System Registry files may not be analyzed.\n");
+            ReportUtil::consoleOutput(stderr, "findSystemRegFiles(): Error getting directory entry = %zu in dir inum = %" PRIuINUM
+                ", errno = %s, some System Registry files may not be analyzed.\n", i, filenameInfo.getINUM(), tsk_error_get());
             continue;
         }
 
@@ -295,15 +295,15 @@ int RegistryLoader::findSystemRegFiles(TSK_FS_INFO *a_fs_info) {
             // @@ FIX THE ERROR MSGS HERE
             TSK_FS_FILE *fs_file;
             if ((fs_file = tsk_fs_dir_get(fs_dir, i)) == NULL) {
-                std::cerr << "Error in loading Registry file. The Registry file will not be analyzed." << std::endl;
-                std::cerr <<  "findSystemRegFiles(): tsk_fs_dir_get failed for file = fs_file is null." << std::endl;
+                ReportUtil::consoleOutput(stderr, "Error in loading Registry file. The Registry file will not be analyzed.\n");
+                ReportUtil::consoleOutput(stderr, "findSystemRegFiles(): tsk_fs_dir_get failed for file = fs_file is null.\n");
                 continue;
             }
 
             RegParser *pRegParser = new RegParser(hiveType);
             if (0 != pRegParser->loadHive(fs_file, hiveType)) {
-                std::cerr << "Error in loading Registry file. The Registry file will not be analyzed." << std::endl;
-                std::cerr << "findSystemRegFiles(): loadHive() failed for file = " << fs_file->name->name << std::endl;
+                ReportUtil::consoleOutput(stderr, "Error in loading Registry file. The Registry file will not be analyzed.\n");
+                ReportUtil::consoleOutput(stderr, "findSystemRegFiles(): loadHive() failed for file = %s\n", fs_file->name->name);
                 continue;
             }
 
@@ -346,12 +346,11 @@ int RegistryLoader::findUserRegFiles(TSK_FS_INFO *a_fs_info) {
 int RegistryLoader::findUserRegFiles(TSK_FS_INFO *a_fs_info, const std::string &a_starting_dir) {
     TSK_FS_DIR *fs_dir;
     TSKFileNameInfo filenameInfo;
-    TSK_FS_FILE *fsFile;
-    int8_t retval = TskHelper::getInstance().path2Inum(a_fs_info, a_starting_dir.c_str(), false, filenameInfo, NULL, &fsFile);
+    int8_t retval = TskHelper::getInstance().path2Inum(a_fs_info, a_starting_dir.c_str(), false, filenameInfo, NULL, NULL);
 
     if (retval == -1) {
-        std::cerr << "Error in finding User Registry files. Some User Registry files may not be analyzed." << std::endl;
-        std::cerr << "findUserRegFiles(): tsk_fs_path2inum() failed for dir = " << a_starting_dir << ", errno = " << tsk_error_get() << std::endl;
+        ReportUtil::consoleOutput(stderr, "Error in finding User Registry files. Some User Registry files may not be analyzed.\n");
+        ReportUtil::consoleOutput(stderr, "findUserRegFiles(): tsk_fs_path2inum() failed for dir = %s, errno = %s\n", a_starting_dir.c_str(), tsk_error_get());
         return -1;
     }
     else if (retval > 0) { // not found
@@ -360,8 +359,8 @@ int RegistryLoader::findUserRegFiles(TSK_FS_INFO *a_fs_info, const std::string &
 
     // open the directory
     if ((fs_dir = tsk_fs_dir_open_meta(a_fs_info, filenameInfo.getINUM())) == NULL) {
-        std::cerr << "Error in finding User Registry files. Some User Registry files may not be analyzed." << std::endl;
-        std::cerr << "findUserRegFiles(): tsk_fs_dir_open_meta() failed for dir = " << a_starting_dir << ", errno = " << tsk_error_get() << std::endl;
+        ReportUtil::consoleOutput(stderr, "Error in finding User Registry files. Some User Registry files may not be analyzed.\n");
+        ReportUtil::consoleOutput(stderr, "findUserRegFiles(): tsk_fs_dir_open_meta() failed for dir = %s, errno = %s\n", a_starting_dir.c_str(), tsk_error_get());
         return -1;
     }
 
@@ -372,9 +371,9 @@ int RegistryLoader::findUserRegFiles(TSK_FS_INFO *a_fs_info, const std::string &
 
         // get the entry
         if ((fs_file = tsk_fs_dir_get(fs_dir, i)) == NULL) {
-            std::cerr << "Error in finding User Registry files. Some User Registry files may not be analyzed." << std::endl;
-            std::cerr << "findUserRegFiles(): Error getting directory entry = " << i << " in dir inum = " << filenameInfo.getINUM() << 
-                ", errno = " << tsk_error_get() << std::endl;
+            ReportUtil::consoleOutput(stderr, "Error in finding User Registry files. Some User Registry files may not be analyzed.\n");
+            ReportUtil::consoleOutput(stderr, "findUserRegFiles(): Error getting directory entry = %zu in dir inum = %" PRIuINUM ", errno = %s\n", 
+                i, filenameInfo.getINUM(), tsk_error_get());
             continue;
         }
 
@@ -410,8 +409,8 @@ int RegistryLoader::findNTUserRegFilesInDir(TSK_FS_INFO *a_fs_info, TSK_INUM_T a
 
     // 1. open the directory
     if ((fs_dir = tsk_fs_dir_open_meta(a_fs_info, a_dir_inum)) == NULL) {
-        std::cerr << "Error in finding NTUSER Registry files. Some User Registry files may not be analyzed." << std::endl;
-        std::cerr << "findNTUserRegFilesInDir(): tsk_fs_dir_open_meta() failed for dir = " << aUserDirName << ", errno = " << tsk_error_get() << std::endl;
+        ReportUtil::consoleOutput(stderr, "Error in finding NTUSER Registry files. Some User Registry files may not be analyzed.\n");
+        ReportUtil::consoleOutput(stderr, "findNTUserRegFilesInDir(): tsk_fs_dir_open_meta() failed for dir = %s, errno = %s\n", aUserDirName.c_str(), tsk_error_get());
         return -1;
     }
 
@@ -423,9 +422,9 @@ int RegistryLoader::findNTUserRegFilesInDir(TSK_FS_INFO *a_fs_info, TSK_INUM_T a
         // get the entry
         const TSK_FS_NAME *fs_name;
         if ((fs_name = tsk_fs_dir_get_name(fs_dir, i)) == NULL) {
-            std::cerr << "Error in finding NTUSER Registry files. Some User Registry files may not be analyzed." << std::endl;
-            std::cerr << "findNTUserRegFilesInDir(): Error getting directory entry = " << i << " in dir inum = " << a_dir_inum << 
-                ", errno = " << tsk_error_get() << std::endl;
+            ReportUtil::consoleOutput(stderr, "Error in finding NTUSER Registry files. Some User Registry files may not be analyzed.\n");
+            ReportUtil::consoleOutput(stderr, "findNTUserRegFilesInDir(): Error getting directory entry = %zu in dir inum = %" PRIuINUM ", errno = %s\n", 
+                i, a_dir_inum, tsk_error_get());
             continue;
         }
 
@@ -439,15 +438,15 @@ int RegistryLoader::findNTUserRegFilesInDir(TSK_FS_INFO *a_fs_info, TSK_INUM_T a
 
             TSK_FS_FILE *fs_file;
             if ((fs_file = tsk_fs_dir_get(fs_dir, i)) == NULL) {
-                std::cerr << "Error in loading Registry file. The Registry file will not be analyzed." << std::endl;
-                std::cerr << "findNTUserRegFilesInDir(): tsk_fs_dir_get() failed for file = fs_file is null." << std::endl;
+                ReportUtil::consoleOutput(stderr, "Error in loading Registry file. The Registry file will not be analyzed.\n");
+                ReportUtil::consoleOutput(stderr, "findNTUserRegFilesInDir(): tsk_fs_dir_get() failed for file = fs_file is null.\n");
                 continue;
             }
 
             RegParser *pRegParser = new RegParser(hiveType);
             if (0 != pRegParser->loadHive(fs_file, hiveType)) {
-                std::cerr << "Error in loading Registry file. The Registry file will not be analyzed." << std::endl;
-                std::cerr << "findNTUserRegFilesInDir(): loadHive() failed for file = " << fs_file->name->name << std::endl;
+                ReportUtil::consoleOutput(stderr, "Error in loading Registry file. The Registry file will not be analyzed.\n");
+                ReportUtil::consoleOutput(stderr, "findNTUserRegFilesInDir(): loadHive() failed for file = %s\n", fs_file->name->name);
                 continue;
             }
             RegFileInfo *pRegFileInfo = new RegFileInfo(fName, toNormalizedOutputPathName(a_userFolderPath + "/" + aUserDirName), hiveType, 
@@ -498,12 +497,12 @@ int RegistryLoader::findUsrClassRegFile(TSK_FS_INFO *a_fs_info, const std::strin
     }
 
     TSKFileNameInfo filenameInfo;
-    TSK_FS_FILE *fsFile;
-    int8_t retval = TskHelper::getInstance().path2Inum(a_fs_info, usrClassSubdir.c_str(), false, filenameInfo, NULL, &fsFile);
+    int8_t retval = TskHelper::getInstance().path2Inum(a_fs_info, usrClassSubdir.c_str(), false, filenameInfo, NULL, NULL);
 
     if (retval == -1) {
-        std::cerr << "Error in finding USRCLASS Registry files. Some User Registry files may not be analyzed." << std::endl;
-        std::cerr << "findUsrClassRegFile(): tsk_fs_path2inum() failed for dir = " << usrClassSubdir << ", errno = " << tsk_error_get() << std::endl;
+        ReportUtil::consoleOutput(stderr, "Error in finding USRCLASS Registry files. Some User Registry files may not be analyzed.\n");
+        ReportUtil::consoleOutput(stderr, "findUsrClassRegFile(): tsk_fs_path2inum() failed for dir = %s, errno = %s\n", 
+            usrClassSubdir.c_str(), tsk_error_get());
         return -1;
     }
     else if (retval == 0) {     //  found
@@ -512,9 +511,9 @@ int RegistryLoader::findUsrClassRegFile(TSK_FS_INFO *a_fs_info, const std::strin
 
         // open the directory
         if ((fs_dir = tsk_fs_dir_open_meta(a_fs_info, filenameInfo.getINUM())) == NULL) {
-            std::cerr << "Error in finding USRCLASS Registry files. Some User Registry files may not be analyzed." << std::endl;
-            std::cerr << "findUsrClassRegFile(): tsk_fs_dir_open_meta() failed for dir inum = " << filenameInfo.getINUM() << 
-                ", errno = " << tsk_error_get() << std::endl;
+            ReportUtil::consoleOutput(stderr, "Error in finding USRCLASS Registry files. Some User Registry files may not be analyzed.\n");
+            ReportUtil::consoleOutput(stderr, "findUsrClassRegFile(): tsk_fs_dir_open_meta() failed for dir inum = %" PRIuINUM ", errno = %s\n",
+                filenameInfo.getINUM(), tsk_error_get());
             return -1;
         }
 
@@ -525,9 +524,9 @@ int RegistryLoader::findUsrClassRegFile(TSK_FS_INFO *a_fs_info, const std::strin
 
             // get the entry
             if ((fs_file = tsk_fs_dir_get(fs_dir, i)) == NULL) {
-                std::cerr << "Error in finding USRCLASS Registry files. Some User Registry files may not be analyzed." << std::endl;
-                std::cerr << "findUsrClassRegFile(): Error getting directory entry = " << i << " in dir inum = " << filenameInfo.getINUM() << 
-                    ", errno = " << tsk_error_get() << std::endl;
+                ReportUtil::consoleOutput(stderr, "Error in finding USRCLASS Registry files. Some User Registry files may not be analyzed.\n");
+                ReportUtil::consoleOutput(stderr, "findUsrClassRegFile(): Error getting directory entry = %zu in dir inum = %" PRIuINUM ", errno = %s\n", 
+                    i, filenameInfo.getINUM(), tsk_error_get());
                 continue;
             }
 
@@ -543,8 +542,8 @@ int RegistryLoader::findUsrClassRegFile(TSK_FS_INFO *a_fs_info, const std::strin
 
                             RegParser *pRegParser = new RegParser(hiveType);
                             if (0 != pRegParser->loadHive(fs_file, hiveType)) {
-                                std::cerr << "Error in loading Registry file. The Registry file will not be analyzed." << std::endl;
-                                std::cerr << "findUsrClassRegFile(): loadHive() failed for file = " << fs_file->name->name << std::endl;
+                                ReportUtil::consoleOutput(stderr, "Error in loading Registry file. The Registry file will not be analyzed.\n");
+                                ReportUtil::consoleOutput(stderr, "findUsrClassRegFile(): loadHive() failed for file = %s\n", fs_file->name->name);
                                 return -1;
                             }
                             RegFileInfo *pRegFileInfo = new RegFileInfo(fName, toNormalizedOutputPathName(usrClassSubdir), hiveType, 
diff --git a/tools/logicalimager/ReportUtil.cpp b/tools/logicalimager/ReportUtil.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..06a1ea12653ff6f7b40b83fca11bd77f771bf841
--- /dev/null
+++ b/tools/logicalimager/ReportUtil.cpp
@@ -0,0 +1,268 @@
+/*
+** The Sleuth Kit
+**
+** Brian Carrier [carrier <at> sleuthkit [dot] org]
+** Copyright (c) 2010-2019 Brian Carrier.  All Rights reserved
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+/**
+* \file ReportUtil.cpp
+* Contains C++ code that implements the Report Util class.
+*/
+
+#include <iostream>
+#include <conio.h>
+#include <string>
+#include <list>
+#include <algorithm>
+#include <locale>
+#include <codecvt>
+#include <direct.h>
+#include <winsock2.h>
+#include <locale.h>
+#include <Wbemidl.h>
+#include <shlwapi.h>
+#include <fstream>
+#include <winbase.h>
+#include <comutil.h>
+
+#include "ReportUtil.h"
+#include "TskHelper.h"
+
+static std::string sessionDirCopy;
+static FILE *reportFile;
+static FILE *consoleFile;
+static bool promptBeforeExit = true;
+
+void ReportUtil::initialize(const std::string &sessionDir) {
+    sessionDirCopy = sessionDir;
+    std::string consoleFileName = sessionDir + "/console.txt";
+    ReportUtil::openConsoleOutput(consoleFileName);
+
+    std::string reportFilename = sessionDir + "/SearchResults.txt";
+    ReportUtil::openReport(reportFilename);
+}
+
+void ReportUtil::copyConfigFile(const std::wstring &configFilename) {
+    // copy the config file into the output session directory
+    std::ifstream src(TskHelper::toNarrow(configFilename), std::ios::binary);
+    std::ofstream dst(sessionDirCopy + "/config.json", std::ios::binary);
+    dst << src.rdbuf();
+    dst.close();
+    src.close();
+}
+
+/*
+* Create the report file and print the header.
+*
+* @param reportFilename Name of the report file
+*/
+void ReportUtil::openReport(const std::string &reportFilename) {
+    reportFile = fopen(reportFilename.c_str(), "w");
+    if (!reportFile) {
+        ReportUtil::consoleOutput(stderr, "ERROR: Failed to open report file %s\n", reportFilename.c_str());
+        handleExit(1);
+    }
+    fprintf(reportFile, "VHD file/directory\tFile system offset\tFile metadata adddress\tExtraction status\tRule set name\tRule name\tDescription\tFilename\tPath\tExtractFilePath\tcrtime\tmtime\tatime\tctime\n");
+}
+
+void ReportUtil::openConsoleOutput(const std::string &consoleFileName) {
+    consoleFile = fopen(consoleFileName.c_str(), "w");
+    if (!consoleFile) {
+        fprintf(stderr, "ERROR: Failed to open console file %s\n", consoleFileName.c_str());
+        handleExit(1);
+    }
+}
+
+void ReportUtil::logOutputToFile(const char *buf) {
+    if (consoleFile) {
+        fprintf(consoleFile, "%s", buf);
+    }
+}
+
+void ReportUtil::consoleOutput(FILE *fd, const char *msg, ...) {
+    char buf[2048];
+    va_list args;
+
+    va_start(args, msg);
+    vsnprintf(buf, sizeof(buf), msg, args);
+    fprintf(fd, "%s", buf);
+    // output to console file
+    logOutputToFile(buf);
+    va_end(args);
+}
+
+void ReportUtil::printDebug(char *msg, const char *fmt, ...) {
+    if (tsk_verbose) {
+        std::string prefix("tsk_logical_imager: ");
+        std::string message = prefix + msg + "\n";
+        tsk_fprintf(stderr, message.c_str(), fmt);
+    }
+}
+
+void ReportUtil::printDebug(char *msg) {
+    printDebug(msg, "");
+}
+
+/*
+* Write a file match result record to the report file. Also send a simple message to stdout, if shouldAlert is true.
+* A report file record contains tab-separated fields:
+*   - output VHD file/directory
+*   - File system offset
+*   - Metadata address
+*   - extractStatus
+*   - ruleSetName
+*   - ruleName
+*   - description
+*   - name
+*   - path
+*   - ExtractFilePath
+*   - crtime
+*   - mtime
+*   - atime
+*   - ctime
+*
+* @param outputLocation output VHD file or directory
+* @param extractStatus Extract status: TSK_OK if file was extracted, TSK_ERR otherwise
+* @param matchedRuleInfo The matched rule info
+* @param fs_file TSK_FS_FILE that matches
+* @param path Parent path of fs_file
+* @param extractedFilePath Extracted file path (non-VHD only)
+*/
+void ReportUtil::reportResult(const std::string &outputLocation, TSK_RETVAL_ENUM extractStatus, const MatchedRuleInfo *matchedRuleInfo, TSK_FS_FILE *fs_file, const char *path, const std::string &extractedFilePath) {
+    if (fs_file->name && (strcmp(fs_file->name->name, ".") == 0 || strcmp(fs_file->name->name, "..") == 0)) {
+        // Don't report . and ..
+        return;
+    }
+    if (extractStatus == TSK_ERR && (fs_file->meta == NULL || fs_file->meta->flags & TSK_FS_NAME_FLAG_UNALLOC)) {
+        // Don't report unallocated files that failed extraction
+        return;
+    }
+    // report file format is "VHD file<tab>File system offset<tab>file metadata address<tab>extractStatus<tab>ruleSetName<tab>ruleName<tab>description<tab>name<tab>path<tab>extractedFilePath<tab>crtime<tab>mtime<tab>atime<tab>ctime"
+    std::string crtimeStr = (fs_file->meta ? std::to_string(fs_file->meta->crtime) : "0");
+    std::string mtimeStr = (fs_file->meta ? std::to_string(fs_file->meta->mtime) : "0");
+    std::string atimeStr = (fs_file->meta ? std::to_string(fs_file->meta->atime) : "0");
+    std::string ctimeStr = (fs_file->meta ? std::to_string(fs_file->meta->ctime) : "0");
+    std::string origFileName(fs_file->name ? fs_file->name->name : "name is null");
+    std::string origFilePath(path);
+
+    // Remove any newlines
+    origFileName.erase(std::remove(origFileName.begin(), origFileName.end(), '\n'), origFileName.end());
+    origFileName.erase(std::remove(origFileName.begin(), origFileName.end(), '\r'), origFileName.end());
+    origFilePath.erase(std::remove(origFilePath.begin(), origFilePath.end(), '\n'), origFilePath.end());
+    origFilePath.erase(std::remove(origFilePath.begin(), origFilePath.end(), '\r'), origFilePath.end());
+
+
+    fprintf(reportFile, "%s\t%" PRIdOFF "\t%" PRIuINUM "\t%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n",
+        outputLocation.c_str(),
+        fs_file->fs_info->offset,
+        (fs_file->meta ? fs_file->meta->addr : 0),
+        extractStatus,
+        matchedRuleInfo->getRuleSetName().c_str(),
+        matchedRuleInfo->getName().c_str(),
+        matchedRuleInfo->getDescription().c_str(),
+        origFileName.c_str(),
+        origFilePath.c_str(),
+        extractedFilePath.c_str(),
+        crtimeStr.c_str(),
+        mtimeStr.c_str(),
+        atimeStr.c_str(),
+        ctimeStr.c_str()
+    );
+    fflush(reportFile);
+
+    std::string fullPath(path);
+    if (fs_file->name) {
+        fullPath += fs_file->name->name;
+    }
+    else {
+        fullPath += "name is null";
+    }
+
+    if (matchedRuleInfo->isShouldAlert()) {
+        ReportUtil::consoleOutput(stdout, "Alert for %s: %s\n",
+            matchedRuleInfo->getRuleSetName().c_str(),
+            fullPath.c_str());
+    }
+}
+
+/*
+* Close the report file.
+*/
+void ReportUtil::closeReport() {
+    if (reportFile) {
+        fclose(reportFile);
+        reportFile = NULL;
+    }
+}
+
+void ReportUtil::handleExit(int code) {
+    if (consoleFile) {
+        fclose(consoleFile);
+        consoleFile = NULL;
+    }
+    if (promptBeforeExit) {
+        std::cout << std::endl << "Press any key to exit";
+        (void)_getch();
+    }
+    exit(code);
+}
+
+/**
+* GetErrorStdStr - returns readable error message for the given error code
+*
+* @param err error code
+* @returns error message string
+*/
+std::string ReportUtil::GetErrorStdStr(DWORD err) {
+    return TskHelper::toNarrow(ReportUtil::GetErrorStdStrW(err));
+}
+
+/**
+* GetLastErrorStdStrW - returns readable widestring error message for the last error code as reported by GetLastError()
+*
+* @returns error message wide string
+*/
+std::wstring ReportUtil::GetLastErrorStdStrW() {
+    DWORD error = GetLastError();
+    return GetErrorStdStrW(error);
+}
+
+/**
+* GetErrorStdStrW - returns readable widestring error message for the given error code
+*
+* @param err error code
+* @returns error message wide string
+*/
+std::wstring ReportUtil::GetErrorStdStrW(DWORD a_err) {
+    if (ERROR_SUCCESS != a_err) {
+        LPVOID lpMsgBuf;
+        DWORD bufLen = FormatMessageW(
+            FORMAT_MESSAGE_ALLOCATE_BUFFER |
+            FORMAT_MESSAGE_FROM_SYSTEM |
+            FORMAT_MESSAGE_IGNORE_INSERTS,
+            NULL,
+            a_err,
+            MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
+            (LPWSTR)&lpMsgBuf,
+            0, NULL);
+        if (bufLen) {
+            LPCWSTR lpMsgStr = (LPCWSTR)lpMsgBuf;
+            std::wstring result(lpMsgStr, lpMsgStr + bufLen);
+            size_t pos = result.find_last_not_of(L"\r\n");
+            if (pos != std::wstring::npos) {
+                result.resize(pos);
+            }
+            LocalFree(lpMsgBuf);
+            return result;
+        }
+    }
+    return std::wstring(L"no error");
+}
+
+void ReportUtil::SetPromptBeforeExit(bool flag) {
+    promptBeforeExit = flag;
+}
diff --git a/tools/logicalimager/ReportUtil.h b/tools/logicalimager/ReportUtil.h
new file mode 100644
index 0000000000000000000000000000000000000000..0b949ca5b55661471a9ef4ae47ff6b91a21bc706
--- /dev/null
+++ b/tools/logicalimager/ReportUtil.h
@@ -0,0 +1,43 @@
+/*
+** The Sleuth Kit
+**
+** Brian Carrier [carrier <at> sleuthkit [dot] org]
+** Copyright (c) 2010-2019 Brian Carrier.  All Rights reserved
+**
+** This software is distributed under the Common Public License 1.0
+**
+*/
+
+#pragma once
+
+#include <string>
+
+#include "MatchedRuleInfo.h"
+#include "tsk/libtsk.h"
+
+/**
+* Defines the Report Utilities
+*
+*/
+class ReportUtil {
+public:
+    static void initialize(const std::string &sessionDir);
+    static void copyConfigFile(const std::wstring &configFilename);
+    static void openReport(const std::string &alertFilename);
+    static void openConsoleOutput(const std::string &consoleFileName);
+    static void logOutputToFile(const char *buf);
+    static void consoleOutput(FILE *fd, const char *msg, ...);
+    static void printDebug(char *msg, const char *fmt, ...);
+    static void printDebug(char *msg);
+    static void closeReport();
+
+    static void reportResult(const std::string &outputLocation, TSK_RETVAL_ENUM extractStatus, 
+        const MatchedRuleInfo *ruleMatchResult, TSK_FS_FILE *fs_file, const char *path, const std::string &extractedFilePath);
+
+    static void SetPromptBeforeExit(bool flag);
+    static void handleExit(int code);
+
+    static std::wstring GetErrorStdStrW(DWORD a_err);
+    static std::wstring GetLastErrorStdStrW();
+    static std::string GetErrorStdStr(DWORD err);
+};
diff --git a/tools/logicalimager/TskFindFiles.cpp b/tools/logicalimager/TskFindFiles.cpp
index 9608571a275e8f31705201471f69495706292cb6..90fb85a93e917df928b3aab37ed000b712b533a5 100644
--- a/tools/logicalimager/TskFindFiles.cpp
+++ b/tools/logicalimager/TskFindFiles.cpp
@@ -26,15 +26,14 @@
 #include "tsk/fs/tsk_ntfs.h"
 #include "TskFindFiles.h"
 #include "TskHelper.h"
-
-extern void logOutputToFile(const char *buf);
+#include "ReportUtil.h"
 
 /**
  * Create the Find Files object given the Logical Imager Configuration
  * @param config LogicalImagerRuleSet to use for finding files
  */
-TskFindFiles::TskFindFiles(const LogicalImagerConfiguration *config, const std::string &driveToProcess) :
-    m_logicialImagerConfiguration(config), m_driveToProcess(driveToProcess)
+TskFindFiles::TskFindFiles(const LogicalImagerConfiguration *config, const std::string &driveName) :
+    m_logicialImagerConfiguration(config), m_driveDisplayName(driveName)
  {
     m_fileCounter = 0;
     m_totalNumberOfFiles = 0;
@@ -42,7 +41,7 @@ TskFindFiles::TskFindFiles(const LogicalImagerConfiguration *config, const std::
 }
 
 TskFindFiles::~TskFindFiles() {
-    std::string title = "Analyzing drive " + m_driveToProcess + " - Searching for files by attribute, 100% complete";
+    std::string title = "Analyzing drive " + m_driveDisplayName + " - Searching for files by attribute, 100% complete";
     SetConsoleTitleA(title.c_str());
 }
 
@@ -52,7 +51,7 @@ TskFindFiles::~TskFindFiles() {
 uint8_t TskFindFiles::handleError() {
     std::string str = tsk_error_get();
     str += "\n";
-    logOutputToFile(str.c_str());
+    ReportUtil::logOutputToFile(str.c_str());
     return 0;
 }
 
@@ -77,13 +76,16 @@ TskFindFiles::filterFs(TSK_FS_INFO * fs_info)
 
     setFileFilterFlags(filterFlags);
 
-    std::string title = "Analyzing drive " + m_driveToProcess + " - Searching for files by attribute";
+    std::string title = "Analyzing drive " + m_driveDisplayName + " - Searching for files by attribute";
     if (TSK_FS_TYPE_ISNTFS(fs_info->ftype)) {
         NTFS_INFO *ntfs_info = (NTFS_INFO *)fs_info;
         if (ntfs_info->alloc_file_count == 0) {
             // we need to force the orphan finding process to get this count
-            tsk_fs_dir_open_meta(fs_info, fs_info->root_inum);
-            m_totalNumberOfFiles = ((NTFS_INFO*)fs_info)->alloc_file_count;
+            TSK_FS_DIR *fs_dir = tsk_fs_dir_open_meta(fs_info, fs_info->root_inum);
+            if (fs_dir) {
+                m_totalNumberOfFiles = ((NTFS_INFO*)fs_info)->alloc_file_count;
+            }
+            tsk_fs_dir_close(fs_dir);
         }
         title += ", 0% complete";
     }
@@ -115,7 +117,7 @@ TSK_RETVAL_ENUM TskFindFiles::processFile(TSK_FS_FILE *fs_file, const char *path
                 m_percentComplete = (unsigned short)(((float)m_fileCounter / (float)m_totalNumberOfFiles) * 100);
                 static unsigned short lastReportedPctComplete = 0;
                 if ((m_percentComplete != lastReportedPctComplete)) {
-                    std::string title = "Analyzing drive " + m_driveToProcess + " - Searching for files by attribute, "
+                    std::string title = "Analyzing drive " + m_driveDisplayName + " - Searching for files by attribute, "
                         + TskHelper::intToStr((long)m_percentComplete) + std::string("% complete");
                     SetConsoleTitleA(title.c_str());
                     lastReportedPctComplete = m_percentComplete;
diff --git a/tools/logicalimager/TskFindFiles.h b/tools/logicalimager/TskFindFiles.h
index e9335338c5b6b1aa2cbea167508f9239de3c5710..5b11568ef21d2d3f4629f72b7a84de8b07124081 100755
--- a/tools/logicalimager/TskFindFiles.h
+++ b/tools/logicalimager/TskFindFiles.h
@@ -23,7 +23,7 @@
 
 class TskFindFiles : public TskAuto {
 public:
-    TskFindFiles(const LogicalImagerConfiguration *config, const std::string &driveToProcess);
+    TskFindFiles(const LogicalImagerConfiguration *config, const std::string &driveName);
     ~TskFindFiles();
     virtual TSK_FILTER_ENUM filterFs(TSK_FS_INFO * fs_info);
     virtual TSK_RETVAL_ENUM processFile(TSK_FS_FILE *fs_file, const char *path);
@@ -34,5 +34,5 @@ class TskFindFiles : public TskAuto {
     size_t m_fileCounter;
     int m_totalNumberOfFiles;
     unsigned short m_percentComplete;
-    const std::string m_driveToProcess;
+    const std::string m_driveDisplayName;
 };
\ No newline at end of file
diff --git a/tools/logicalimager/TskHelper.cpp b/tools/logicalimager/TskHelper.cpp
index 49277205d9b4316f18e6da46f66b881dd0924eae..a49415d15b8360ea15abdd4a469a6650e9c204d1 100755
--- a/tools/logicalimager/TskHelper.cpp
+++ b/tools/logicalimager/TskHelper.cpp
@@ -17,6 +17,7 @@
 #include "tsk/base/tsk_base_i.h"
 #include "tsk/fs/tsk_fs_i.h"
 #include "TskHelper.h"
+#include "ReportUtil.h"
 
 static std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
 
@@ -33,13 +34,22 @@ TskHelper::TskHelper()
 
 TskHelper::~TskHelper()
 {
+    if (m_img_info) {
+        tsk_img_free(m_img_info);
+    }
+    for (auto itr = m_FSInfoList.begin(); itr != m_FSInfoList.end(); itr++) {
+        tsk_fs_close(*itr);
+        tsk_fs_free(*itr);
+    }
 }
 
 void TskHelper::reset() {
     releasePath2InumCache();
     m_img_info = NULL;
+    for (auto itr = m_FSInfoList.begin(); itr != m_FSInfoList.end(); itr++) {
+        tsk_fs_close(*itr);
+    }
     m_FSInfoList.clear();
-    m_path2InumCache.clear();
 }
 
 /**
@@ -156,6 +166,14 @@ bool TskHelper::startsWith(const std::string &bigStr, const std::string &lilStr)
         && equal(lilStr.begin(), lilStr.end(), bigStr.begin());
 }
 
+/*
+* Check if the string str ends with suffix
+*/
+bool TskHelper::endsWith(const std::string &str, const std::string &suffix) {
+    return str.size() >= suffix.size() &&
+        str.compare(str.size() - suffix.size(), suffix.size(), suffix) == 0;
+}
+
 /**
  * \ingroup fslib
  *
@@ -185,7 +203,9 @@ TskHelper::path2Inum(TSK_FS_INFO *a_fs, const char *a_path, bool anyExtension,
     // std::cout << "TskHlprPath2Inum: Looking for " << a_path << " in FS " << a_fs->offset << std::endl;
 
     a_result.setINUM(0);
-    *a_fs_file = NULL;
+    if (a_fs_file) {
+        *a_fs_file = NULL;
+    }
 
     std::string path_matched;
     bool ignoreExt = false;
@@ -261,7 +281,9 @@ TskHelper::path2Inum(TSK_FS_INFO *a_fs, const char *a_path, bool anyExtension,
             if (targetPathSubString.length() == targetPathAsString.length()) {
                 a_result.setINUM(inum);
                 a_result.setFSNameFlags(pInumCacheData->getFSNameFlag());
-                *a_fs_file = NULL;
+                if (a_fs_file) {
+                    *a_fs_file = NULL;
+                }
                 free(cpath);
                 return 0;
             }
@@ -285,7 +307,9 @@ TskHelper::path2Inum(TSK_FS_INFO *a_fs, const char *a_path, bool anyExtension,
                 * specify a trailing / at the end. */
                 if (cur_name_to_match == NULL) {
                     a_result.setINUM(inum);
-                    *a_fs_file = NULL;
+                    if (a_fs_file) {
+                        *a_fs_file = NULL;
+                    }
                     free(cpath);
                     return 0;
                 }
@@ -386,6 +410,7 @@ TskHelper::path2Inum(TSK_FS_INFO *a_fs, const char *a_path, bool anyExtension,
                 tmp.append(fs_name->name);
                 if (addPathToInumCache(a_fs, tmp, pCacheData) == false) {
                     // it was already in the cache
+                    tsk_fs_dir_close(pCacheData->getFSDir());
                     delete (pCacheData);
                 }
             }
@@ -510,6 +535,7 @@ TskHelper::path2Inum(TSK_FS_INFO *a_fs, const char *a_path, bool anyExtension,
                 if (pCacheData) {
                     if (addPathToInumCache(a_fs, path_matched, pCacheData) == false) {
                         // it was already in the cache
+                        tsk_fs_dir_close(pCacheData->getFSDir());
                         delete (pCacheData);
                     }
                 }
@@ -554,8 +580,10 @@ TskHelper::path2Inum(TSK_FS_INFO *a_fs, const char *a_path, bool anyExtension,
                         tsk_fs_name_copy(a_fs_name, fs_name_best);
                     }
 
-                    // return the TSK_FS_FILE if one was requested
-                    *a_fs_file = tsk_fs_file_open_meta(a_fs, NULL, fs_name_best->meta_addr);
+                    if (a_fs_file) {
+                        // return the TSK_FS_FILE if one was requested
+                        *a_fs_file = tsk_fs_file_open_meta(a_fs, NULL, fs_name_best->meta_addr);
+                    }
                 }
 
                 //cerr << getNowTimeStr() << "TSKHlprPath2inum(): Found = " << std::string(a_path) << endl;
@@ -686,6 +714,16 @@ void TskHelper::replaceAll(std::string &str, const std::string &from, const std:
     }
 }
 
+void TskHelper::replaceAll(std::wstring &str, const std::wstring &from, const std::wstring &to) {
+    if (from.empty())
+        return;
+    size_t start_pos = 0;
+    while ((start_pos = str.find(from, start_pos)) != std::wstring::npos) {
+        str.replace(start_pos, from.length(), to);
+        start_pos += to.length();
+    }
+}
+
 /**
 * replaceAll - replaces all occurences of 'from' string with the 'to' string, in the given input string, starting the search from specified position
 *
@@ -705,3 +743,96 @@ void TskHelper::replaceAll(std::string &str, const std::string &from, const std:
         start_pos += to.length();
     }
 }
+
+/*
+* Open the file system in the disk image and add it to TskHelper::getInstance()
+*
+* @param img Disk image to open
+* @param byteOffset Byte offset to start analyzing from
+*/
+void TskHelper::openFs(TSK_IMG_INFO *img, TSK_OFF_T byteOffset) {
+    TSK_FS_INFO *fs_info;
+    if ((fs_info = tsk_fs_open_img(img, byteOffset, TSK_FS_TYPE_DETECT)) != NULL) {
+        // Tell TSKHelper about this FS
+        TskHelper::getInstance().addFSInfo(fs_info);
+    }
+    else {
+        // check if it is bitlocker - POC effort
+        char buffer[32];
+        tsk_img_read(img, byteOffset, buffer, 32);
+        if ((buffer[3] == '-') && (buffer[4] == 'F') &&
+            (buffer[5] == 'V') && (buffer[6] == 'E') &&
+            (buffer[7] == '-') && (buffer[8] == 'F') &&
+            (buffer[9] == 'S') && (buffer[10] == '-'))
+        {
+            std::cerr << "Volume is encrypted with BitLocker." << std::endl
+                << "Volume did not have a file system and has a BitLocker signature" << std::endl;
+        }
+
+        ReportUtil::printDebug("Volume does not contain a file system");
+        tsk_error_reset();
+    }
+}
+
+void TskHelper::enumerateFileAndVolumeSystems(TSK_IMG_INFO *img) {
+    TSK_VS_INFO *vs_info;
+    if ((vs_info = tsk_vs_open(img, 0, TSK_VS_TYPE_DETECT)) == NULL) {
+        ReportUtil::printDebug("No volume system found. Looking for file system");
+        TskHelper::getInstance().openFs(img, 0);
+    }
+    else {
+        // process the volume system
+        //fprintf(stdout, "Partition:\n");
+        for (TSK_PNUM_T i = 0; i < vs_info->part_count; i++) {
+            const TSK_VS_PART_INFO *vs_part = tsk_vs_part_get(vs_info, i);
+            //fprintf(stdout, "#%i: %s Start: %s Length: %s\n",
+            //    i, vs_part->desc, std::to_string(vs_part->start).c_str(), std::to_string(vs_part->len).c_str());
+            if ((vs_part->flags & TSK_VS_PART_FLAG_UNALLOC) || (vs_part->flags & TSK_VS_PART_FLAG_META)) {
+                continue;
+            }
+            TskHelper::getInstance().openFs(img, vs_part->start * vs_part->vs->block_size);
+        }
+        tsk_vs_close(vs_info);
+    }
+}
+
+/*
+* Add all FS found in the given image to TskHelper::getInstance()
+* Returns TSK_IMG_INFO *, caller should call img->close(img) when done.
+* The FS can be obtained by calling TskHelper::getInstance().getFSInfoList()
+* Caller must call TskHelper::getInstance().reset() when done with the FS.
+* May exit the program if image failed to open.
+*
+* @param image Path to image
+* @return TSK_IMG_INFO of the opened image if success, NULL if fail.
+*/
+TSK_IMG_INFO *TskHelper::addFSFromImage(const TSK_TCHAR *image) {
+    TSK_IMG_INFO *img;
+    TSK_IMG_TYPE_ENUM imgtype = TSK_IMG_TYPE_DETECT;
+    unsigned int ssize = 0;
+
+    if ((img = tsk_img_open(1, &image, imgtype, ssize)) == NULL) {
+        ReportUtil::consoleOutput(stderr, "%s\n", tsk_error_get());
+        return NULL;
+    }
+
+    TskHelper::getInstance().reset();
+    TskHelper::getInstance().setImgInfo(img);
+
+    TSK_VS_INFO *vs_info;
+    if ((vs_info = tsk_vs_open(img, 0, TSK_VS_TYPE_DETECT)) == NULL) {
+        TskHelper::getInstance().openFs(img, 0);
+    }
+    else {
+        // process the volume system
+        for (TSK_PNUM_T i = 0; i < vs_info->part_count; i++) {
+            const TSK_VS_PART_INFO *vs_part = tsk_vs_part_get(vs_info, i);
+            if ((vs_part->flags & TSK_VS_PART_FLAG_UNALLOC) || (vs_part->flags & TSK_VS_PART_FLAG_META)) {
+                continue;
+            }
+            TskHelper::getInstance().openFs(img, vs_part->start * vs_part->vs->block_size);
+        }
+        tsk_vs_close(vs_info);
+    }
+    return img;
+}
\ No newline at end of file
diff --git a/tools/logicalimager/TskHelper.h b/tools/logicalimager/TskHelper.h
index c43eba88d9add6a2b196279244b4c680a3d62536..cbacb7477ee674a799d2444aae62df59c27a4fd4 100755
--- a/tools/logicalimager/TskHelper.h
+++ b/tools/logicalimager/TskHelper.h
@@ -69,22 +69,26 @@ class TskHelper {
     void reset(void);
 
     void addFSInfo(TSK_FS_INFO * fs_info);
-    TSK_FS_INFO * getFSInfo(TSK_OFF_T offset);
+    TSK_FS_INFO *getFSInfo(TSK_OFF_T offset);
     const std::list<TSK_FS_INFO *> getFSInfoList();
 
-    void setImgInfo(TSK_IMG_INFO *a_img_info) { m_img_info = a_img_info; }
-
+    void setImgInfo(TSK_IMG_INFO *a_img_info) { m_img_info = a_img_info; }; 
+    void enumerateFileAndVolumeSystems(TSK_IMG_INFO *img);
+    void openFs(TSK_IMG_INFO *img, TSK_OFF_T byteOffset);
     int path2Inum(TSK_FS_INFO *a_fs, const char *a_path, bool anyExtension, TSKFileNameInfo &a_result, TSK_FS_NAME *a_fs_name, TSK_FS_FILE **a_fs_file);
+    static TSK_IMG_INFO *addFSFromImage(const TSK_TCHAR *image);
 
     static std::string toLower(const std::string &srcStr);
     static std::string toUpper(const std::string &srcStr);
     static std::string toNarrow(const std::wstring& a_utf16Str);
     static std::wstring toWide(const std::string& a_utf8Str);
     static bool startsWith(const std::string &bigStr, const std::string &lilStr);
+    static bool endsWith(const std::string &str, const std::string &suffix);
     static std::string intToStr(long l);
     static std::string intToStr(size_t l);
     static void replaceAll(std::string& str, const std::string& from, const std::string& to);
     static void replaceAll(std::string& str, const std::string& from, const std::string& to, size_t pos);
+    static void replaceAll(std::wstring &str, const std::wstring &from, const std::wstring &to);
 
 private:
     std::string stripExt(const char *a_path);    // strip the extension from the given name, if any
diff --git a/tools/logicalimager/tsk_logical_imager.cpp b/tools/logicalimager/tsk_logical_imager.cpp
index f3d62adb3ef48657eedc7600aec33ca11b321cb9..05f8490ec6fa023bbf2b76ff48c3e8325adc5114 100644
--- a/tools/logicalimager/tsk_logical_imager.cpp
+++ b/tools/logicalimager/tsk_logical_imager.cpp
@@ -3,7 +3,7 @@
  ** The Sleuth Kit
  **
  ** Brian Carrier [carrier <at> sleuthkit [dot] org]
- ** Copyright (c) 2010-2011 Brian Carrier.  All Rights reserved
+ ** Copyright (c) 2019 Basis Technology.  All Rights reserved
  **
  ** This software is distributed under the Common Public License 1.0
  **
@@ -22,6 +22,7 @@
 #include <Wbemidl.h>
 #include <shlwapi.h>
 #include <fstream>
+#include <winbase.h>
 
 #pragma comment(lib, "wbemuuid.lib")
 
@@ -36,129 +37,17 @@
 #include "TskHelper.h"
 #include "RegistryAnalyzer.h"
 
-std::wstring GetLastErrorStdStrW();
-std::string GetErrorStdStr(DWORD err);
-std::wstring GetErrorStdStrW(DWORD err);
-TSK_IMG_INFO *addFSFromImage(const TSK_TCHAR *image);
+#include "DriveUtil.h"
+#include "ReportUtil.h"
+#include "FileExtractor.h"
 
 static TSK_TCHAR *progname;
-FILE *consoleFile = NULL;
-bool promptBeforeExit = true;
-static std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>> converter;
-
-static void handleExit(int code) {
-    if (consoleFile) {
-        fclose(consoleFile);
-        consoleFile = NULL;
-    }
-    if (promptBeforeExit) {
-        std::cout << std::endl << "Press any key to exit";
-        (void)_getch();
-    }
-    exit(code);
-}
+bool createVHD = false;
 
-void openConsoleOutput(const std::string &consoleFileName) {
-    consoleFile = fopen(consoleFileName.c_str(), "w");
-    if (!consoleFile) {
-        fprintf(stderr, "ERROR: Failed to open console file %s\n", consoleFileName.c_str());
-        handleExit(1);
-    }
-}
+static std::wstring cwd;
 
-void logOutputToFile(const char *buf) {
-    if (consoleFile) {
-        fprintf(consoleFile, buf);
-    }
-}
-
-void consoleOutput(FILE *fd, const char *msg, ...) {
-    char buf[2048];
-    va_list args;
-
-    va_start(args, msg);
-    vsnprintf(buf, sizeof(buf), msg, args);
-    fprintf(fd, buf);
-    // output to console file
-    logOutputToFile(buf);
-    va_end(args);
-}
-
-void printDebug(char *msg, const char *fmt...) {
-    if (tsk_verbose) {
-        string prefix("tsk_logical_imager: ");
-        string message = prefix + msg + "\n";
-        tsk_fprintf(stderr, message.c_str(), fmt);
-    }
-}
-
-void printDebug(char *msg) {
-    printDebug(msg, "");
-}
-
-/**
-* GetErrorStdStr - returns readable error message for the given error code
-*
-* @param err error code
-* @returns error message string
-*/
-string GetErrorStdStr(DWORD err) {
-    return TskHelper::toNarrow(GetErrorStdStrW(err));
-}
-
-/**
-* GetLastErrorStdStrW - returns readable widestring error message for the last error code as reported by GetLastError()
-*
-* @returns error message wide string
-*/
-static std::wstring GetLastErrorStdStrW() {
-    DWORD error = GetLastError();
-    return GetErrorStdStrW(error);
-}
-
-/**
-* GetErrorStdStrW - returns readable widestring error message for the given error code
-*
-* @param err error code
-* @returns error message wide string
-*/
-static std::wstring GetErrorStdStrW(DWORD a_err) {
-    if (ERROR_SUCCESS != a_err) {
-        LPVOID lpMsgBuf;
-        DWORD bufLen = FormatMessageW(
-            FORMAT_MESSAGE_ALLOCATE_BUFFER |
-            FORMAT_MESSAGE_FROM_SYSTEM |
-            FORMAT_MESSAGE_IGNORE_INSERTS,
-            NULL,
-            a_err,
-            MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
-            (LPWSTR)&lpMsgBuf,
-            0, NULL);
-        if (bufLen) {
-            LPCWSTR lpMsgStr = (LPCWSTR)lpMsgBuf;
-
-            /***
-            LPWSTR p = const_cast<LPWSTR>(_tcschr(lpMsgStr, _T('\r')));
-            if(p != NULL) { // lose CRLF
-            *p = _T('\0');
-            }
-            ****/
-
-            std::wstring result(lpMsgStr, lpMsgStr + bufLen);
-
-            size_t pos = result.find_last_not_of(L"\r\n");
-            if (pos != std::wstring::npos) {
-                result.resize(pos);
-            }
-
-
-            LocalFree(lpMsgBuf);
-
-            return result;
-        }
-    }
-    return std::wstring(L"no error");
-}
+static std::string outputLocation;
+static FileExtractor *fileExtractor = NULL;
 
 /**
 * isWinXPOrOlder: Determine if we are on Windows XP or older OS
@@ -225,13 +114,13 @@ static int getLocalHost(string &a_hostName) {
     WSADATA wsaData;
     int iResult = WSAStartup(MAKEWORD(2, 2), &wsaData);
     if (iResult != 0) {
-        consoleOutput(stderr, "WSAStartup failed with error = %d\n", iResult);
+        ReportUtil::consoleOutput(stderr, "WSAStartup failed with error = %d\n", iResult);
         return -1;
     }
 
     char buf[MAX_PATH];
     if (gethostname(buf, sizeof(buf)) == SOCKET_ERROR) {
-        consoleOutput(stderr, "Error getting host name. Error =  %d\n", WSAGetLastError());
+        ReportUtil::consoleOutput(stderr, "Error getting host name. Error =  %d\n", WSAGetLastError());
         return -1;
     }
     a_hostName = string(buf);
@@ -241,14 +130,14 @@ static int getLocalHost(string &a_hostName) {
 }
 
 /**
-* createDirectory: Create a directory to store sparse_image.vhd
+* createSessionDirectory: Create a directory relative to current working directory for host.
 *
-* @param directoryPathname - the directory pathname created
+* @param [out] directoryPathname - the directory pathname created
 * @returns  0 on success
 *           -1 if error
 *
 */
-static int createDirectory(string &directoryPathname) {
+static int createSessionDirectory(string &directoryPathname) {
     time_t now;
     struct tm localTime;
 
@@ -268,7 +157,7 @@ static int createDirectory(string &directoryPathname) {
     if (stat(outDirName.c_str(), &st) != 0) {
         int rc = _mkdir(outDirName.c_str());
         if (rc != 0) {
-            consoleOutput(stderr, "Failed to create output folder = %s Error: %d\n", outDirName.c_str(), rc);
+            ReportUtil::consoleOutput(stderr, "Failed to create output folder = %s Error: %d\n", outDirName.c_str(), rc);
             return -1;
         }
     }
@@ -277,462 +166,7 @@ static int createDirectory(string &directoryPathname) {
 }
 
 /**
-* wmi_init: Initialize WMN
-*
-* @param input wmiNamespace - wmi namespace to open
-* @returns  0 on success
-*                        WBEM_E_INVALID_NAMESPACE, if namespace is not found
-*           -1 if error
-*
-* Ref: https://msdn.microsoft.com/en-us/library/aa390423(VS.85).aspx
-*
-*/
-
-static long wmi_init(const std::wstring& wmiNamespace, IWbemLocator **ppWbemLocator, IWbemServices **ppWbemServices) {
-    HRESULT hres;
-
-    // Step 1: Initialize COM.
-
-    hres = CoInitializeEx(0, COINIT_MULTITHREADED);
-    if (FAILED(hres)) {
-        consoleOutput(stderr, "wmi_init: Failed to initialize COM library. Error code = %#X\n", hres);
-        return -1;                  // Program has failed.
-    }
-
-    // Step 2: Set general COM security levels
-    hres = CoInitializeSecurity(
-        NULL,
-        -1,                          // COM authentication
-        NULL,                        // Authentication services
-        NULL,                        // Reserved
-        RPC_C_AUTHN_LEVEL_DEFAULT,   // Default authentication
-        RPC_C_IMP_LEVEL_IMPERSONATE, // Default Impersonation
-        NULL,                        // Authentication info
-        EOAC_NONE,                   // Additional capabilities
-        NULL                         // Reserved
-    );
-
-    if (FAILED(hres)) {
-        consoleOutput(stderr, "wmi_init: Failed to initialize security. Error code = %#X\n", hres);
-        CoUninitialize();
-        return -1;                    // Program has failed.
-    }
-
-    // Step 3: Obtain the initial locator to WMI
-    hres = CoCreateInstance(
-        CLSID_WbemLocator,
-        0,
-        CLSCTX_INPROC_SERVER,
-        IID_IWbemLocator, (LPVOID *)ppWbemLocator);
-
-    if (FAILED(hres))
-    {
-        consoleOutput(stderr, "wmi_init: Failed to create IWbemLocator object. Err code = %#X\n", hres);
-        CoUninitialize();
-        return -1;                 // Program has failed.
-    }
-
-    // Step 4: Connect to WMI through the IWbemLocator::ConnectServer method
-    // Connect to the given namespace with
-    // the current user and obtain pointer pSvc
-    // to make IWbemServices calls.
-    hres = (*ppWbemLocator)->ConnectServer(
-        _bstr_t(wmiNamespace.c_str()), // Object path of WMI namespace
-        NULL,                    // User name. NULL = current user
-        NULL,                    // User password. NULL = current
-        0,                       // Locale. NULL indicates current
-        NULL,                    // Security flags.
-        0,                       // Authority (e.g. Kerberos)
-        0,                       // Context object
-        ppWbemServices                    // pointer to IWbemServices proxy
-    );
-
-    if (FAILED(hres)) {
-        if (WBEM_E_INVALID_NAMESPACE != hres) {
-            consoleOutput(stderr, "wmi_init: Could not connect to namespace %s, Error = %s\n",
-                TskHelper::toNarrow(wmiNamespace).c_str(), GetErrorStdStr(hres).c_str());
-        }
-
-        (*ppWbemLocator)->Release();
-        CoUninitialize();
-
-        return (WBEM_E_INVALID_NAMESPACE == hres) ? hres : -1;
-    }
-
-    // Step 5: Set security levels on the proxy
-    hres = CoSetProxyBlanket(
-        *ppWbemServices,                        // Indicates the proxy to set
-        RPC_C_AUTHN_WINNT,           // RPC_C_AUTHN_xxx
-        RPC_C_AUTHZ_NONE,            // RPC_C_AUTHZ_xxx
-        NULL,                        // Server principal name
-        RPC_C_AUTHN_LEVEL_CALL,      // RPC_C_AUTHN_LEVEL_xxx
-        RPC_C_IMP_LEVEL_IMPERSONATE, // RPC_C_IMP_LEVEL_xxx
-        NULL,                        // client identity
-        EOAC_NONE                    // proxy capabilities
-    );
-
-    if (FAILED(hres)) {
-        consoleOutput(stderr, "wmi_init: Could not set proxy blanket. Error code = %#X\n", hres);
-        (*ppWbemServices)->Release();
-        (*ppWbemLocator)->Release();
-        CoUninitialize();
-        return -1;               // Program has failed.
-    }
-    return 0;
-}
-
-/**
-* wmi_close: closes WMI
-*
-* @returns  0 on success
-*           -1 if error
-*
-*/
-static int wmi_close(IWbemLocator **ppWbemLocator, IWbemServices **ppWbemServices) {
-    // Cleanup
-    // ========
-
-    (*ppWbemServices)->Release();
-    (*ppWbemLocator)->Release();
-    CoUninitialize();
-
-    (*ppWbemServices) = NULL;
-    (*ppWbemLocator) = NULL;
-
-    return 0;
-}
-
-/**
-* checkDriveForLDM: checks if the given drive is an LDM disk
-*
-* @param input driveLetter drive to check, for example C:
-*
-* @returns  0 if the drive is NOT an LDM disk
-*           1 if the drive IS an LDM disk
-*           -1 if error, or if drive not found
-*
-*/
-static int checkDriveForLDM(const string& driveLetter) {
-
-    IWbemLocator *pWbemLocator = NULL;
-    IWbemServices *pWbemServices = NULL;
-
-    if (0 != wmi_init(L"ROOT\\CIMV2", &pWbemLocator, &pWbemServices)) {
-        return -1;
-    }
-
-    // Use the IWbemServices pointer to make requests of WMI.
-    // Make requests here:
-    HRESULT hres;
-    IEnumWbemClassObject* pEnumerator = NULL;
-    bool bDriveFound = false;
-    int isLDM = 0;
-
-    std::wstring wstrQuery = L"ASSOCIATORS OF {Win32_LogicalDisk.DeviceID='";
-    wstrQuery += TskHelper::toWide(driveLetter);
-    wstrQuery += L"'} where AssocClass=Win32_LogicalDiskToPartition";
-
-    // Run WMI query
-    hres = pWbemServices->ExecQuery(
-        bstr_t("WQL"),
-        bstr_t(wstrQuery.c_str()),
-        WBEM_FLAG_FORWARD_ONLY | WBEM_FLAG_RETURN_IMMEDIATELY,
-        NULL,
-        &pEnumerator);
-
-    if (FAILED(hres)) {
-        std::cerr << "WMI Query for partition type failed. "
-            << "Error code = 0x"
-            << std::hex << hres << std::endl;
-        wmi_close(&pWbemLocator, &pWbemServices);
-        return -1;
-    } else {
-        IWbemClassObject *pclsObj;
-        ULONG uReturn = 0;
-        while (pEnumerator) {
-            hres = pEnumerator->Next(WBEM_INFINITE, 1, &pclsObj, &uReturn);
-            if (0 == uReturn) break;
-
-            VARIANT vtProp, vtProp2;
-
-            hres = pclsObj->Get(_bstr_t(L"Type"), 0, &vtProp, 0, 0);
-            std::wstring partitionType = vtProp.bstrVal;
-
-            hres = pclsObj->Get(_bstr_t(L"DeviceID"), 0, &vtProp2, 0, 0);
-            std::wstring deviceID = vtProp2.bstrVal;
-
-            VariantClear(&vtProp);
-            VariantClear(&vtProp2);
-
-            bDriveFound = true;
-
-            //std::wcout << L"Drive: " << TskHelper::toWide(driveLetter) << ", DeviceID:  " << deviceID << ", Type: " << partitionType << std::endl;
-            if (string::npos != TskHelper::toLower(TskHelper::toNarrow(partitionType)).find("logical disk manager")) {
-                //std::cerr << "Found Logical Disk Manager disk for drive = " << driveLetter << std::endl;
-                isLDM = 1;
-            }
-        }
-    }
-    pEnumerator->Release();
-
-    wmi_close(&pWbemLocator, &pWbemServices);
-
-    return bDriveFound ? isLDM : -1;
-}
-
-/**
-* checkDriveForBitlocker: checks if the given drive has BitLocker encrypted
-*
-* @param input driveLetter drive to check, for example C:
-*
-* @returns  0  if the drive is not encrypted
-*           1  if the drive is Bitlocker encrypted
-*           -1 if error
-*
-*/
-static int checkDriveForBitlocker(const string& driveLetter) {
-
-    IWbemLocator *pWbemLocator = NULL;
-    IWbemServices *pWbemServices = NULL;
-
-    long rc = 0;
-
-    std::wstring wsBitLockerNamespace = L"ROOT\\CIMV2\\Security\\MicrosoftVolumeEncryption";
-
-    // Init WMI with the requisite namespace. This may fail on some versions of Windows, if Bitlocker in not installed.
-    rc = wmi_init(wsBitLockerNamespace, &pWbemLocator, &pWbemServices);
-    if (0 != rc) {
-        if ((WBEM_E_INVALID_NAMESPACE == rc)) {
-            std::cerr << " Bitlocker is not installed." << std::endl;
-            return 0;
-        } else {
-            std::cerr << "Failed to connect to WMI namespace = " << TskHelper::toNarrow(wsBitLockerNamespace) << std::endl;
-            return -1;
-        }
-    }
-
-    // Use the IWbemServices pointer to make requests of WMI.
-    // Make requests here:
-    HRESULT hres;
-    IEnumWbemClassObject* pEnumerator = NULL;
-
-    unsigned int bitLockerStatus = 0; // assume no Bitlocker
-    int returnStatus = 0;
-                                                                      // WMI query
-    std::wstring wstrQuery = L"SELECT * FROM Win32_EncryptableVolume where driveletter = '";
-    wstrQuery += TskHelper::toWide(driveLetter);
-    wstrQuery += L"'";
-
-    // Run WMI query
-    hres = pWbemServices->ExecQuery(
-        bstr_t("WQL"),
-        bstr_t(wstrQuery.c_str()),
-        WBEM_FLAG_FORWARD_ONLY | WBEM_FLAG_RETURN_IMMEDIATELY,
-        NULL,
-        &pEnumerator);
-
-    if (FAILED(hres)) {
-        std::cerr << "WMI Query for Win32_EncryptableVolume failed. "
-            << "Error code = 0x"
-            << std::hex << hres << std::endl;
-        wmi_close(&pWbemLocator, &pWbemServices);
-        return -1;
-    } else {
-        IWbemClassObject *pclsObj;
-        ULONG uReturn = 0;
-        while (pEnumerator) {
-            hres = pEnumerator->Next(WBEM_INFINITE, 1, &pclsObj, &uReturn);
-            if (0 == uReturn) break;
-
-            VARIANT vtProp;
-            hres = pclsObj->Get(_bstr_t(L"EncryptionMethod"), 0, &vtProp, 0, 0);
-
-            if (WBEM_E_NOT_FOUND == hres) { // Means Bitlocker is not installed
-                bitLockerStatus = 0;
-            } else {
-                unsigned int encryptionMethod = vtProp.uintVal;
-                bitLockerStatus = (0 == encryptionMethod) ? 0 : 1;
-                if (bitLockerStatus == 1) {
-                    returnStatus = 1;
-                }
-            }
-            VariantClear(&vtProp);
-        }
-    }
-    pEnumerator->Release();
-
-    wmi_close(&pWbemLocator, &pWbemServices);
-
-    return returnStatus;
-}
-
-/**
-* isDriveLocked: checks if the given drive is BitLocker locked
-*
-* @param input driveLetter drive to check, for example C:
-*
-* @returns  0  if the drive is not locked
-*           1  if the drive is Bitlocker locked
-*           -1 if error
-*
-*/
-static int isDriveLocked(const string& driveLetter) {
-
-    IWbemLocator *pWbemLocator = NULL;
-    IWbemServices *pWbemServices = NULL;
-
-    long rc = 0;
-
-    std::wstring wsBitLockerNamespace = L"ROOT\\CIMV2\\Security\\MicrosoftVolumeEncryption";
-
-    // Init WMI with the requisite namespace. This may fail on some versions of Windows, if Bitlocker in not installed.
-    rc = wmi_init(wsBitLockerNamespace, &pWbemLocator, &pWbemServices);
-    if (0 != rc) {
-        if ((WBEM_E_INVALID_NAMESPACE == rc)) {
-            std::cerr << " Bitlocker is not installed." << std::endl;
-            return 0;
-        }
-        else {
-            std::cerr << "Failed to connect to WMI namespace = " << TskHelper::toNarrow(wsBitLockerNamespace) << std::endl;
-            return -1;
-        }
-    }
-
-    // Use the IWbemServices pointer to make requests of WMI.
-    // Make requests here:
-    HRESULT hres;
-    IEnumWbemClassObject* pEnumerator = NULL;
-
-    int returnStatus = 0;
-    // WMI query
-    std::wstring wstrQuery = L"SELECT * FROM Win32_EncryptableVolume where driveletter = '";
-    wstrQuery += TskHelper::toWide(driveLetter);
-    wstrQuery += L"'";
-
-    // Run WMI query
-    hres = pWbemServices->ExecQuery(
-        bstr_t("WQL"),
-        bstr_t(wstrQuery.c_str()),
-        WBEM_FLAG_FORWARD_ONLY | WBEM_FLAG_RETURN_IMMEDIATELY,
-        NULL,
-        &pEnumerator);
-
-    if (FAILED(hres)) {
-        std::cerr << "WMI Query for Win32_EncryptableVolume failed. "
-            << "Error code = 0x"
-            << std::hex << hres << std::endl;
-        wmi_close(&pWbemLocator, &pWbemServices);
-        return -1;
-    }
-    else {
-        IWbemClassObject *pclsObj;
-        ULONG uReturn = 0;
-        while (pEnumerator) {
-            hres = pEnumerator->Next(WBEM_INFINITE, 1, &pclsObj, &uReturn);
-            if (0 == uReturn) break;
-
-            VARIANT vtProp;
-            hres = pclsObj->Get(_bstr_t(L"ProtectionStatus"), 0, &vtProp, 0, 0);
-
-            if (WBEM_E_NOT_FOUND != hres) {
-                unsigned int protectionStatus = vtProp.uintVal;
-                if (2 == protectionStatus) {
-                    returnStatus = 1;
-                }
-            }
-            VariantClear(&vtProp);
-        }
-    }
-    pEnumerator->Release();
-
-    wmi_close(&pWbemLocator, &pWbemServices);
-
-    return returnStatus;
-}
-
-/**
-* getPhysicalDrives: return a list of physical drives
-*
-* @param output a vector of physicalDrives
-* @returns true on success, or false on error
-*/
-static BOOL getPhysicalDrives(std::vector<std::wstring> &phyiscalDrives) {
-    char physical[60000];
-
-    /* Get list of Windows devices.  Result is a list of NULL
-     * terminated device names. */
-    if (QueryDosDeviceA(NULL, (LPSTR)physical, sizeof(physical))) {
-        phyiscalDrives.clear();
-        for (char *pos = physical; *pos; pos += strlen(pos) + 1) {
-            std::wstring str(TskHelper::toWide(pos));
-            if (str.rfind(_TSK_T("PhysicalDrive")) == 0) {
-                phyiscalDrives.push_back(str);
-                printDebug("Found %s from QueryDosDeviceA", pos);
-            }
-        }
-    } else {
-        consoleOutput(stderr, "QueryDosDevice() return error: %d\n", GetLastError());
-        return false;
-    }
-    return true;
-}
-
-static char *driveTypeToString(UINT type) {
-    switch (type) {
-        case DRIVE_UNKNOWN:
-            return "DRIVE_UNKNOWN";
-        case DRIVE_NO_ROOT_DIR:
-            return "DRIVE_NO_ROOT_DIR";
-        case DRIVE_REMOVABLE:
-            return "DRIVE_REMOVABLE";
-        case DRIVE_FIXED:
-            return "DRIVE_FIXED";
-        case DRIVE_REMOTE:
-            return "DRIVE_REMOTE";
-        case DRIVE_CDROM:
-            return "DRIVE_CDROM";
-        case DRIVE_RAMDISK:
-            return "DRIVE_RAMDISK";
-        default:
-            return "UNKNOWN";
-    }
-}
-
-static bool hasBitLockerOrLDM(const std::string &systemDriveLetter) {
-    int checkLDMStatus = 0;
-    int checkBitlockerStatus = 0;
-
-    checkLDMStatus = checkDriveForLDM(systemDriveLetter);
-    if (1 == checkLDMStatus) {
-        printDebug("System drive %s is an LDM disk\n", systemDriveLetter.c_str());
-        return TRUE;
-    }
-
-    // If bitlocker protection is enabled, then analyze it
-    checkBitlockerStatus = checkDriveForBitlocker(systemDriveLetter);
-    if (1 == checkBitlockerStatus) {
-        printDebug("System drive %s is BitLocker encrypted\n", systemDriveLetter.c_str());
-        return TRUE;
-    }
-
-    if (0 == checkLDMStatus && 0 == checkBitlockerStatus) {
-        return false;        // neither LDM nor BitLocker detected
-    }
-    else { // an error happened in determining LDM or ProtectionStatus
-        if (-1 == checkLDMStatus) {
-            printDebug("Error in checking LDM disk\n");
-        }
-        if (-1 == checkBitlockerStatus) {
-            printDebug("Error in checking BitLocker protection status\n");
-        }
-
-        // Take a chance and go after PhysicalDrives, few systems have LDM or Bitlocker
-        return false;
-    }
-}
-
-/**
-* getDrivesToProcess() - returns the drive to process
+* getDrivesToProcess() - returns the drives to process
 *          By default we process all available PhysicalDrives, unless
 *          a drive is paritioned with LDM or has Bitlocker enabled, in which case we
 *          enumerate all drive letters.
@@ -757,11 +191,10 @@ static BOOL getDrivesToProcess(std::vector<std::wstring> &drivesToProcess) {
         char szDrive[_MAX_DRIVE + 1];
         sprintf(szDrive, "%c:\\", iDrive + 'A');
         UINT uDriveType = GetDriveTypeA(szDrive);
-        //printf("Drive %s Type %s\n", szDrive, driveTypeToString(uDriveType));
         if (uDriveType == DRIVE_FIXED || uDriveType == DRIVE_REMOVABLE) {
             sprintf(szDrive, "%c:", iDrive + 'A');
             systemDriveLetter = szDrive;
-            status |= hasBitLockerOrLDM(systemDriveLetter);
+            status |= DriveUtil::hasBitLockerOrLDM(systemDriveLetter);
             if (status) {
                 break;
             }
@@ -774,7 +207,6 @@ static BOOL getDrivesToProcess(std::vector<std::wstring> &drivesToProcess) {
             sprintf(szDrive, "%c:\\", iDrive + 'A');
             UINT uDriveType = GetDriveTypeA(szDrive);
             if (uDriveType == DRIVE_FIXED || uDriveType == DRIVE_REMOVABLE) {
-                //printf("Drive %s Type %s\n", szDrive, driveTypeToString(uDriveType));
                 sprintf(szDrive, "%c:", iDrive + 'A');
                 systemDriveLetter = szDrive;
                 drivesToProcess.push_back(TskHelper::toWide(systemDriveLetter));
@@ -784,48 +216,21 @@ static BOOL getDrivesToProcess(std::vector<std::wstring> &drivesToProcess) {
     } else {
         // None of the drives has BitLocker or LDM, try all physical drives
         drivesToProcess.clear();
-        if (getPhysicalDrives(drivesToProcess)) {
+        if (DriveUtil::getPhysicalDrives(drivesToProcess)) {
             return TRUE;
         }
     }
     return FALSE;
 }
 
-static void openFs(TSK_IMG_INFO *img, TSK_OFF_T byteOffset) {
-    TSK_FS_INFO *fs_info;
-    if ((fs_info = tsk_fs_open_img(img, byteOffset, TSK_FS_TYPE_DETECT)) != NULL) {
-        // Tell TSKHelper about this FS
-        TskHelper::getInstance().addFSInfo(fs_info);
-    }
-    else {
-        // check if it is bitlocker - POC effort
-        char buffer[32];
-        tsk_img_read(img, byteOffset, buffer, 32);
-        if ((buffer[3] == '-') && (buffer[4] == 'F') &&
-            (buffer[5] == 'V') && (buffer[6] == 'E') &&
-            (buffer[7] == '-') && (buffer[8] == 'F') &&
-            (buffer[9] == 'S') && (buffer[10] == '-'))
-        {
-            std::cerr << "Volume is encrypted with BitLocker." << std::endl
-                << "Volume did not have a file system and has a BitLocker signature" << std::endl;
-        }
-
-        printDebug("Volume does not contain a file system");
-        tsk_error_reset();
-    }
-}
-
 /**
-* hasTskLogicalImage - test if /tsk_logical_image.exe is in the image
+* hasTskLogicalImage - test if /tsk_logical_image.exe is in the image/drive
 *
-* @param image - path to image
 * @return true if found, false otherwise
 */
-static bool hasTskLogicalImager(const TSK_TCHAR *image) {
+static bool hasTskLogicalImager() {
     bool result = false;
 
-    TSK_IMG_INFO *img = addFSFromImage(image);
-
     const std::list<TSK_FS_INFO *> fsList = TskHelper::getInstance().getFSInfoList();
     TSKFileNameInfo filenameInfo;
     std::list<std::string> pathForTskLogicalImagerExe;
@@ -841,147 +246,35 @@ static bool hasTskLogicalImager(const TSK_TCHAR *image) {
                 tsk_fs_file_close(fs_file);
                 break;
             }
+            tsk_fs_file_close(fs_file);
         }
         if (result) {
             break;
         }
     }
-    img->close(img);
     TskHelper::getInstance().reset();
     return result;
 }
 
-
-FILE *m_alertFile = NULL;
-std::string driveToProcess;
-std::string outputVHDFilename;
-
-/*
-* Create the alert file and print the header.
-*
-* @param alertFilename Name of the alert file
-*/
-static void openAlert(const std::string &alertFilename) {
-    m_alertFile = fopen(alertFilename.c_str(), "w");
-    if (!m_alertFile) {
-        consoleOutput(stderr, "ERROR: Failed to open alert file %s\n", alertFilename.c_str());
-        handleExit(1);
-    }
-    fprintf(m_alertFile, "VHD file\tFile system offset\tFile metadata adddress\tExtraction status\tRule set name\tRule name\tDescription\tFilename\tPath\n");
-}
-
-/*
-* Write an file match alert record to the alert file. Also send same record to stdout.
-* An alert file record contains tab-separated fields:
-*   - VHD file
-*   - File system offset
-*   - Metadata address
-*   - extractStatus
-*   - ruleSetName
-*   - ruleName
-*   - description
-*   - name
-*   - path
-*
-* @param driveName Drive name
-* @param extractStatus Extract status: 0 if file was extracted, 1 otherwise
-* @param ruleMatchResult The rule match result
-* @param fs_file TSK_FS_FILE that matches
-* @param path Parent path of fs_file
-*/
-static void alert(const std::string &outputVHDFilename, TSK_RETVAL_ENUM extractStatus, const RuleMatchResult *ruleMatchResult, TSK_FS_FILE *fs_file, const char *path) {
-    if (fs_file->name && (strcmp(fs_file->name->name, ".") == 0 || strcmp(fs_file->name->name, "..") == 0)) {
-        // Don't alert . and ..
-        return;
-    }
-    // alert file format is "VHD file<tab>File system offset<tab>file metadata address<tab>extractStatus<tab>ruleSetName<tab>ruleName<tab>description<tab>name<tab>path"
-    fprintf(m_alertFile, "%s\t%" PRIdOFF "\t%" PRIuINUM "\t%d\t%s\t%s\t%s\t%s\t%s\n",
-        outputVHDFilename.c_str(),
-        fs_file->fs_info->offset,
-        (fs_file->meta ? fs_file->meta->addr : 0),
-        extractStatus,
-        ruleMatchResult->getRuleSetName().c_str(),
-        ruleMatchResult->getName().c_str(),
-        ruleMatchResult->getDescription().c_str(),
-        (fs_file->name ? fs_file->name->name : "name is null"),
-        path);
-    fflush(m_alertFile);
-
-    std::string fullPath(path);
-    if (fs_file->name) {
-        fullPath += fs_file->name->name;
-    } else {
-        fullPath += "name is null";
-    }
-
-    if (ruleMatchResult->isShouldAlert()) {
-        consoleOutput(stdout, "Alert for %s: %s\n",
-            ruleMatchResult->getRuleSetName().c_str(),
-            fullPath.c_str());
-    }
-}
-
-/*
-* Close the alert file.
-*/
-static void closeAlert() {
-    if (m_alertFile) {
-        fclose(m_alertFile);
-    }
-}
-
-/**
-* Extract a file. tsk_img_writer_create must have been called prior to this method.
-*
-* @param fs_file File details
-* @returns TSK_RETVAL_ENUM TSK_OK if file is extracted, TSK_ERR otherwise.
-*/
-static TSK_RETVAL_ENUM extractFile(TSK_FS_FILE *fs_file, const char *path) {
-    TSK_OFF_T offset = 0;
-    size_t bufferLen = 16 * 1024;
-    char buffer[16 * 1024];
-
-    while (true) {
-        ssize_t bytesRead = tsk_fs_file_read(fs_file, offset, buffer, bufferLen, TSK_FS_FILE_READ_FLAG_NONE);
-        if (bytesRead == -1) {
-            if (fs_file->meta && fs_file->meta->size == 0) {
-                // ts_fs_file_read returns -1 with empty files, don't report it.
-                return TSK_OK;
-            }
-            else {
-                printDebug("extractFile: tsk_fs_file_read returns -1 filename=%s\toffset=%" PRIxOFF "\n", fs_file->name->name, offset);
-                consoleOutput(stderr, "extractFile: tsk_fs_file_read returns -1 filename=%s\tpath=%s\toffset=%" PRIxOFF "\n", fs_file->name->name, path, offset);
-                return TSK_ERR;
-            }
-        }
-        else if (bytesRead == 0) {
-            return TSK_ERR;
-        }
-        offset += bytesRead;
-        if (offset >= fs_file->meta->size) {
-            break;
-        }
-    }
-    return TSK_OK;
-}
-
 /*
 * matchCallback - The function is passed into the LogicalImagerConfiguration.
-*                 It is called when a file matches a rule. Depending on the matchResult setting,
+*                 It is called when a file matches a rule. Depending on the matchedRuleInfo setting,
 *                 this function may extract the matched file and alert the user.
 *
-* @param matchResult The RuleMatchResult
+* @param matchedRuleInfo The MatchedRuleInfo
 * @param fs_file TSK_FS_FILE that matches the rule
 * @param path Path of the file
 *
 * @returns TSK_IMG_TYPE_ENUM TSK_OK if callback has no error
 */
-static TSK_RETVAL_ENUM matchCallback(const RuleMatchResult *matchResult, TSK_FS_FILE *fs_file, const char *path) {
+static TSK_RETVAL_ENUM matchCallback(const MatchedRuleInfo *matchedRuleInfo, TSK_FS_FILE *fs_file, const char *path) {
     TSK_RETVAL_ENUM extractStatus = TSK_ERR;
-    if (matchResult->isShouldSave()) {
-        extractStatus = extractFile(fs_file, path);
+    std::string extractedFilePath;
+
+    if (matchedRuleInfo->isShouldSave()) {
+        extractStatus = fileExtractor->extractFile(fs_file, path, extractedFilePath);
     }
-    alert(outputVHDFilename, extractStatus, matchResult, fs_file, path);
+    ReportUtil::reportResult(outputLocation, extractStatus, matchedRuleInfo, fs_file, path, extractedFilePath);
     return TSK_OK;
 }
 
@@ -1019,94 +312,98 @@ string getPathName(const string &fullPath) {
     return "";
 }
 
-static void usage() {
-    TFPRINTF(stderr,
-        _TSK_T
-        ("usage: %s [-c configPath]\n"),
-        progname);
-    tsk_fprintf(stderr, "\t-c configPath: The configuration file. Default is logical-imager-config.json\n");
-    tsk_fprintf(stderr, "\t-v: verbose output to stderr\n");
-    tsk_fprintf(stderr, "\t-V: Print version\n");
-    handleExit(1);
-}
+/**
+* Search for files that were specified by full path.
+* @param config Configuration that contains rules and other settings
+* @param driveName Name of drive being processed (for display only)
+*/
 
-/*
- * Add all FS found in the given image to TskHelp::getInstance()
- * Returns TSK_IMG_INFO *, caller should call img->close(img) when done.
- * The FS can be obtained by calling TskHelper::getInstance().getFSInfoList()
- * Caller must call TskHelper::getInstance().reset() when done with the FS
- */
-TSK_IMG_INFO *addFSFromImage(const TSK_TCHAR *image) {
-    TSK_IMG_INFO *img;
-    TSK_IMG_TYPE_ENUM imgtype = TSK_IMG_TYPE_DETECT;
-    unsigned int ssize = 0;
+static void searchFilesByFullPath(LogicalImagerConfiguration *config, const std::string &driveName) {
+    ReportUtil::consoleOutput(stdout, "%s - Searching for full path files\n", driveName.c_str());
+    SetConsoleTitleA(std::string("Analyzing drive " + driveName + " - Searching for full path files").c_str());
 
-    if ((img = tsk_img_open(1, &image, imgtype, ssize)) == NULL) {
-        consoleOutput(stderr, tsk_error_get());
-        handleExit(1);
-    }
+    const std::list<TSK_FS_INFO *> fsList = TskHelper::getInstance().getFSInfoList();
 
-    TskHelper::getInstance().reset();
-    TskHelper::getInstance().setImgInfo(img);
+    // cycle over each FS in the image
+    for (std::list<TSK_FS_INFO *>::const_iterator fsListIter = fsList.begin(); fsListIter != fsList.end(); ++fsListIter) {
 
-    TSK_VS_INFO *vs_info;
-    if ((vs_info = tsk_vs_open(img, 0, TSK_VS_TYPE_DETECT)) == NULL) {
-        openFs(img, 0);
-    }
-    else {
-        // process the volume system
-        for (TSK_PNUM_T i = 0; i < vs_info->part_count; i++) {
-            const TSK_VS_PART_INFO *vs_part = tsk_vs_part_get(vs_info, i);
-            if ((vs_part->flags & TSK_VS_PART_FLAG_UNALLOC) || (vs_part->flags & TSK_VS_PART_FLAG_META)) {
-                continue;
+        // cycle over the rule sets
+        const std::vector<std::pair<const MatchedRuleInfo *, std::list<std::string>>> fullFilePathsRules = config->getFullFilePaths();
+        for (std::vector<std::pair<const MatchedRuleInfo *, std::list<std::string>>>::const_iterator ruleSetIter = fullFilePathsRules.begin(); ruleSetIter != fullFilePathsRules.end(); ++ruleSetIter) {
+            const MatchedRuleInfo *matchedRuleInfo = ruleSetIter->first;
+            const std::list<std::string> filePathsInSet = ruleSetIter->second;
+
+            // cycle over each path in the set
+            for (std::list<std::string>::const_iterator filePathIter = filePathsInSet.begin(); filePathIter != filePathsInSet.end(); ++filePathIter) {
+                TSK_FS_FILE *fs_file;
+                TSK_FS_NAME *fs_name = tsk_fs_name_alloc(1024, 16);
+                TSKFileNameInfo filenameInfo;
+                int retval = TskHelper::getInstance().path2Inum(*fsListIter, filePathIter->c_str(), false, filenameInfo, fs_name, &fs_file);
+                if (retval == 0 && fs_file != NULL) {
+                    std::string parent = getPathName(*filePathIter);
+                    fs_file->name = fs_name;
+                    matchCallback(matchedRuleInfo, fs_file, parent.c_str());
+                }
+                tsk_fs_name_free(fs_name);
+                tsk_fs_file_close(fs_file);
             }
-            openFs(img, vs_part->start * vs_part->vs->block_size);
         }
-        tsk_vs_close(vs_info);
     }
-    return img;
 }
 
-bool driveIsFAT(char *drive) {
-    std::wstring imageStr = std::wstring(_TSK_T("\\\\.\\")) + TskHelper::toWide(std::string(drive));
-    const TSK_TCHAR *image = (TSK_TCHAR *)imageStr.c_str();
-    bool result = false;
+/**
+* Search for the files that were specified by attributes (extensions, etc.)
+* @param config COnfiguration with rules
+* @param driveName Display name of drive being processed
+* @param img_info Handle to open TSK image
+*/
+static void searchFilesByAttribute(LogicalImagerConfiguration *config, const std::string &driveName, TSK_IMG_INFO *img_info) {
+    TskFindFiles findFiles(config, driveName);
+    if (findFiles.openImageHandle(img_info)) {
+        ReportUtil::consoleOutput(stderr, "Failed to open imagePath, reason:%s\n", tsk_error_get());
+        return;
+    }
 
-    TSK_IMG_INFO *img = addFSFromImage(image);
+    ReportUtil::consoleOutput(stdout, "%s - Searching for files by attribute\n", driveName.c_str());
+    SetConsoleTitleA(std::string("Analyzing drive " + driveName + " - Searching for files by attribute").c_str());
 
-    const std::list<TSK_FS_INFO *> fsList = TskHelper::getInstance().getFSInfoList();
-    for (std::list<TSK_FS_INFO *>::const_iterator fsListIter = fsList.begin(); fsListIter != fsList.end(); ++fsListIter) {
-        TSK_FS_INFO *fsInfo = *fsListIter;
-        TSK_FS_TYPE_ENUM fileSystemType = fsInfo->ftype;
-        if (fileSystemType == TSK_FS_TYPE_FAT12 ||
-            fileSystemType == TSK_FS_TYPE_FAT16 ||
-            fileSystemType == TSK_FS_TYPE_FAT32 ||
-            fileSystemType == TSK_FS_TYPE_FAT_DETECT) {
-            result = true;
-            break;
-        }
+    if (findFiles.findFilesInImg()) {
+        // we already logged the errors in findFiles.handleError()
+        // Don't exit, just let it continue
     }
-    img->close(img);
-    TskHelper::getInstance().reset();
-    return result;
 }
 
-/*
- * Result true if Current Working Directory file system is FAT.
- */
-bool cwdIsFAT() {
-    char *buffer;
+/**
+* Searches for hives and reports on users
+* @param sessionDir Directory to create user file in
+* @param driveName Display name of drive being processed.
+*/
+static void reportUsers(const std::string &sessionDir, const std::string &driveName) {
+    ReportUtil::consoleOutput(stdout, "%s - Searching for registry\n", driveName.c_str());
+    SetConsoleTitleA(std::string("Analyzing drive " + driveName + " - Searching for registry").c_str());
 
-    if ((buffer = _getcwd(NULL, 0)) == NULL) {
-        consoleOutput(stderr, "Error: _getcwd failed");
-        handleExit(1);
+    // Enumerate Users with RegistryAnalyzer
+    std::string driveLetter = driveName;
+    if (TskHelper::endsWith(driveName, ":")) {
+        driveLetter = driveName.substr(0, driveName.length() - 1);
     }
+    std::string userFilename = sessionDir + "/" + driveLetter + "_users.txt";
+    RegistryAnalyzer registryAnalyzer(userFilename);
+    registryAnalyzer.analyzeSAMUsers();
+}
+
+
 
-    char drive[3];
-    strncpy(drive, buffer, 2);
-    drive[2] = 0;
-    free(buffer);
-    return driveIsFAT(drive);
+
+static void usage() {
+    TFPRINTF(stderr,
+        _TSK_T
+        ("usage: %s [-c configPath]\n"),
+        progname);
+    tsk_fprintf(stderr, "\t-c configPath: The configuration file. Default is logical-imager-config.json\n");
+    tsk_fprintf(stderr, "\t-v: verbose output to stderr\n");
+    tsk_fprintf(stderr, "\t-V: Print version\n");
+    ReportUtil::handleExit(1);
 }
 
 int
@@ -1116,10 +413,7 @@ main(int argc, char **argv1)
 
     int ch;
     TSK_TCHAR **argv;
-    unsigned int ssize = 0;
-    std::vector<std::wstring> imgPaths;
-    const TSK_TCHAR *imgPath;
-    BOOL iFlagUsed = FALSE;
+    const TSK_TCHAR *imgPathArg = NULL; // set to image path if user specified on command line
     TSK_TCHAR *configFilename = (TSK_TCHAR *) NULL;
     LogicalImagerConfiguration *config = NULL;
 
@@ -1134,8 +428,8 @@ main(int argc, char **argv1)
     // On Windows, get the wide arguments (mingw doesn't support wmain)
     argv = CommandLineToArgvW(GetCommandLineW(), &argc);
     if (argv == NULL) {
-        consoleOutput(stderr, "Error getting wide arguments\n");
-        handleExit(1);
+        ReportUtil::consoleOutput(stderr, "Error getting wide arguments\n");
+        ReportUtil::handleExit(1);
     }
 #else
     argv = (TSK_TCHAR **)argv1;
@@ -1162,11 +456,10 @@ main(int argc, char **argv1)
             tsk_version_print(stdout);
             exit(0);
 
+        // undocumented.  Used for testing only.
         case _TSK_T('i'):
-            imgPath = OPTARG;
-            iFlagUsed = TRUE;
+            imgPathArg = OPTARG;
             break;
-
         }
     }
 
@@ -1175,205 +468,179 @@ main(int argc, char **argv1)
         usage();
     }
 
-    // If CWD is FAT, exit with error because it cannot create files greater 4 GB
-    if (cwdIsFAT()) {
-        consoleOutput(stderr, "Error: Writing to FAT device is not supported.");
-        handleExit(1);
-    }
 
+    ////////////////////////////////////////////////////////
+    // Load the configuration file
     if (configFilename == NULL) {
         configFilename = _TSK_T("logical-imager-config.json");
-        consoleOutput(stdout, "Using default configuration file logical-imager-config.json\n");
+        ReportUtil::consoleOutput(stdout, "Using default configuration file logical-imager-config.json\n");
+    }
+    ReportUtil::printDebug("Using config file %s", TskHelper::toNarrow(configFilename).c_str());
+
+    try {
+        config = new LogicalImagerConfiguration(TskHelper::toNarrow(configFilename), (LogicalImagerRuleSet::matchCallback)matchCallback);
+        ReportUtil::SetPromptBeforeExit(config->getPromptBeforeExit());
+        createVHD = config->getCreateVHD();
+    }
+    catch (std::exception &e) {
+        std::cerr << e.what() << std::endl;
+        ReportUtil::handleExit(1);
+    }
+
+    // If CWD is FAT, exit with error because it cannot create files greater 4 GB
+    if (DriveUtil::cwdIsFAT(cwd)) {
+        ReportUtil::consoleOutput(stderr, "Error: Writing to FAT device is not supported.\n");
+        ReportUtil::handleExit(1);
     }
-    printDebug("Using config file %s", TskHelper::toNarrow(configFilename).c_str());
 
-    std::wstring wImgPathName;
-    std::vector<std::wstring> drivesToProcess;
 
-    if (iFlagUsed) {
-        imgPaths.push_back(imgPath);
+    //////////////////////////////////////////////////////
+    // Enumerate what we are going to analyze
+
+    // these two vectors should be kept in sync and each entry should correspond to an entry in the other at the same offset
+    std::vector<std::wstring> imgShortNames; // short name of data
+    std::vector<std::wstring> imgPaths; // full path for data to analyze
+
+    if (imgPathArg != NULL) {
+        // @@@ Ideally, we'd just store the name of the image here and strip out parent folder
+        imgShortNames.push_back(imgPathArg);
+        imgPaths.push_back(imgPathArg);
     } else {
-        if (getDrivesToProcess(drivesToProcess)) {
-            printDebug("Process is running in elevated mode");
-            for (auto it = std::begin(drivesToProcess); it != std::end(drivesToProcess); ++it) {
-                imgPaths.push_back(std::wstring(_TSK_T("\\\\.\\")) + *it);
+        if (getDrivesToProcess(imgShortNames)) {
+            ReportUtil::printDebug("Process is running in elevated mode");
+            for (auto it = std::begin(imgShortNames); it != std::end(imgShortNames); ++it) {
+                imgPaths.push_back(std::wstring(L"\\\\.\\") + *it);
             }
         }
         else {
-            consoleOutput(stderr, "Process is not running in elevated mode\n");
-            handleExit(1);
+            ReportUtil::consoleOutput(stderr, "Process is not running in elevated mode\n");
+            ReportUtil::handleExit(1);
         }
     }
 
-    try {
-        config = new LogicalImagerConfiguration(TskHelper::toNarrow(configFilename), (LogicalImagerRuleSet::matchCallback)matchCallback);
-        promptBeforeExit = config->getPromptBeforeExit();
-    }
-    catch (std::exception &e) {
-        std::cerr << e.what() << std::endl;
-        handleExit(1);
-    }
 
+    /////////////////////////////////////////////////////////////////////
+    // Now that we've verified everything, let's make an output folder
     // create a directory with hostname_timestamp
-    std::string directoryPath;
-    if (createDirectory(directoryPath) == -1) {
-        consoleOutput(stderr, "Failed to create directory %s\n", directoryPath.c_str());
-        handleExit(1);
+    std::string sessionDir;
+    if (createSessionDirectory(sessionDir) == -1) {
+        ReportUtil::consoleOutput(stderr, "Failed to create directory %s\n", sessionDir.c_str());
+        ReportUtil::handleExit(1);
     }
 
-    std::string consoleFileName = directoryPath + "/console.txt";
-    openConsoleOutput(consoleFileName);
-
-    consoleOutput(stdout, "Created directory %s\n", directoryPath.c_str());
-
-    // copy the config file into the output directoryPath
-    std::ifstream src(TskHelper::toNarrow(configFilename), std::ios::binary);
-    std::ofstream dst(directoryPath + "/config.json", std::ios::binary);
-    dst << src.rdbuf();
-    dst.close();
-    src.close();
+    ReportUtil::initialize(sessionDir);
 
-    std::string alertFileName = directoryPath + "/SearchResults.txt";
-    openAlert(alertFileName);
+    ReportUtil::consoleOutput(stdout, "Created directory %s\n", sessionDir.c_str());
+    ReportUtil::copyConfigFile(configFilename);
 
     std::list<std::pair<TSK_IMG_INFO *, std::string>> imgFinalizePending;
+    fileExtractor = new FileExtractor(createVHD, cwd, sessionDir);
 
     // Loop through all images
     for (size_t i = 0; i < imgPaths.size(); ++i) {
-        const TSK_TCHAR *image = (TSK_TCHAR *)imgPaths[i].c_str();
-        driveToProcess = iFlagUsed ? TskHelper::toNarrow(imgPaths[i]) : TskHelper::toNarrow(drivesToProcess[i]);
-        printDebug("Processing drive %s", driveToProcess.c_str());
-        consoleOutput(stdout, "Analyzing drive %zi of %zu (%s)\n", (size_t) i+1, imgPaths.size(), driveToProcess.c_str());
-        SetConsoleTitleA(std::string("Analyzing drive " + TskHelper::intToStr((long)i+1) + " of " + TskHelper::intToStr(imgPaths.size()) + " (" + driveToProcess + ")").c_str());
-
-        if (isDriveLocked(driveToProcess) == 1) {
-            consoleOutput(stdout, "Skipping drive %s because it is bitlocked.\n", driveToProcess.c_str());
+        const TSK_TCHAR *imagePath = (TSK_TCHAR *)imgPaths[i].c_str();
+        std::string imageShortName = TskHelper::toNarrow(imgShortNames[i]);
+
+        ReportUtil::printDebug("Processing drive %s", imageShortName.c_str());
+        ReportUtil::consoleOutput(stdout, "Analyzing drive %zi of %zu (%s)\n", (size_t) i+1, imgPaths.size(), imageShortName.c_str());
+        SetConsoleTitleA(std::string("Analyzing drive " + TskHelper::intToStr((long)i+1) + " of " + TskHelper::intToStr(imgPaths.size()) + " (" + imageShortName + ")").c_str());
+
+        if (DriveUtil::isDriveLocked(imageShortName) == 1) {
+            ReportUtil::consoleOutput(stdout, "Skipping drive %s because it is bitlocked.\n", imageShortName.c_str());
             continue;
         }
 
-        if (driveToProcess.back() == ':') {
-            driveToProcess = driveToProcess.substr(0, driveToProcess.size() - 1);
+        TSK_IMG_INFO *img;
+        img = TskHelper::addFSFromImage(imagePath);
+        if (img == NULL) {
+            continue;
         }
-        outputVHDFilename = (iFlagUsed ? "sparse_image" : driveToProcess) + ".vhd";
-        std::string outputFileName = directoryPath + "/" + outputVHDFilename;
-        std::wstring outputFileNameW = TskHelper::toWide(outputFileName);
 
-        if (hasTskLogicalImager(image)) {
-            consoleOutput(stdout, "Skipping drive %s because tsk_logical_imager.exe exists at the root directory.\n", driveToProcess.c_str());
+        if (hasTskLogicalImager()) {
+            ReportUtil::consoleOutput(stdout, "Skipping drive %s because tsk_logical_imager.exe exists at the root directory.\n", imageShortName.c_str());
+            img->close(img);
+            TskHelper::getInstance().reset();
             continue; // Don't process a drive with /tsk_logicial_image.exe at the root
         }
 
-        TSK_IMG_INFO *img;
-        if ((img = tsk_img_open(1, &image, imgtype, ssize)) == NULL) {
-            tsk_error_print(stderr);
-            handleExit(1);
-        }
-
-        if (img->itype == TSK_IMG_TYPE_RAW) {
-            if (tsk_img_writer_create(img, (TSK_TCHAR *)outputFileNameW.c_str()) == TSK_ERR) {
-                tsk_error_print(stderr);
-                consoleOutput(stderr, "Failed to initialize VHD writer\n");
-                handleExit(1);
+        std::string subDirForFiles;
+        if (imgPathArg != NULL) {
+            subDirForFiles = "sparse_image";
+        } else {
+            subDirForFiles = imageShortName;
+            // strip final ":"
+            if (subDirForFiles.back() == ':') {
+                subDirForFiles = subDirForFiles.substr(0, subDirForFiles.size() - 1);
             }
         }
-        else {
-            consoleOutput(stderr, "Image is not a RAW image, VHD will not be created\n");
-        }
+        fileExtractor->initializePerImage(subDirForFiles);
 
-        imgFinalizePending.push_back(std::make_pair(img, driveToProcess));
+        // @@@ Should probably rename outputLocation for non-VHD files
+        outputLocation = subDirForFiles + (createVHD ? ".vhd" : "");
 
-        TskFindFiles findFiles(config, driveToProcess);
+        bool closeImgNow = true;
 
-        TskHelper::getInstance().reset();
-        TskHelper::getInstance().setImgInfo(img);
-        TSK_VS_INFO *vs_info;
-        if ((vs_info = tsk_vs_open(img, 0, TSK_VS_TYPE_DETECT)) == NULL) {
-            printDebug("No volume system found. Looking for file system");
-            openFs(img, 0);
-        }
-        else {
-            // process the volume system
-            //fprintf(stdout, "Partition:\n");
-            for (TSK_PNUM_T i = 0; i < vs_info->part_count; i++) {
-                const TSK_VS_PART_INFO *vs_part = tsk_vs_part_get(vs_info, i);
-                //fprintf(stdout, "#%i: %s Start: %s Length: %s\n",
-                //    i, vs_part->desc, std::to_string(vs_part->start).c_str(), std::to_string(vs_part->len).c_str());
-                if ((vs_part->flags & TSK_VS_PART_FLAG_UNALLOC) || (vs_part->flags & TSK_VS_PART_FLAG_META)) {
-                    continue;
-                }
-                openFs(img, vs_part->start * vs_part->vs->block_size);
-            }
-            tsk_vs_close(vs_info);
-        }
+        // Setup the VHD for this drive (if we are making one)
+        if (createVHD) {
+            if (img->itype == TSK_IMG_TYPE_RAW) {
+                std::string outputFileName = sessionDir + "/" + outputLocation;
 
-        consoleOutput(stdout, "%s - Searching for full path files\n", driveToProcess.c_str());
-        SetConsoleTitleA(std::string("Analyzing drive " + driveToProcess + " - Searching for full path files").c_str());
-
-        const std::list<TSK_FS_INFO *> fsList = TskHelper::getInstance().getFSInfoList();
-        TSKFileNameInfo filenameInfo;
-        const std::vector<std::pair<const RuleMatchResult *, std::list<std::string>>> fullFilePathsRules = config->getFullFilePaths();
-        for (std::vector<std::pair<const RuleMatchResult *, std::list<std::string>>>::const_iterator iter = fullFilePathsRules.begin(); iter != fullFilePathsRules.end(); ++iter) {
-            const RuleMatchResult *matchResult = iter->first;
-            const std::list<std::string> filePaths = iter->second;
-            TSK_FS_FILE *fs_file;
-            for (std::list<TSK_FS_INFO *>::const_iterator fsListIter = fsList.begin(); fsListIter != fsList.end(); ++fsListIter) {
-                for (std::list<std::string>::const_iterator iter = filePaths.begin(); iter != filePaths.end(); ++iter) {
-                    int retval = TskHelper::getInstance().path2Inum(*fsListIter, iter->c_str(), false, filenameInfo, NULL, &fs_file);
-                    if (retval == 0 && fs_file != NULL) {
-                        std::string filename = getFilename(*iter);
-                        std::string parent = getPathName(*iter);
-                        // create a TSK_FS_NAME for alert purpose
-                        fs_file->name = new TSK_FS_NAME();
-                        fs_file->name->name = (char *)tsk_malloc(strlen(filename.c_str()) + 1);
-                        strcpy(fs_file->name->name, filename.c_str());
-                        matchCallback(matchResult, fs_file, parent.c_str());
-
-                        tsk_fs_file_close(fs_file);
-                    }
+                if (tsk_img_writer_create(img, (TSK_TCHAR *)TskHelper::toWide(outputFileName).c_str()) == TSK_ERR) {
+                    ReportUtil::consoleOutput(stderr, "Failed to initialize VHD writer, reason: %s\n", tsk_error_get());
+                    ReportUtil::handleExit(1);
                 }
+                imgFinalizePending.push_back(std::make_pair(img, imageShortName));
+                closeImgNow = false;
+            }
+            else {
+                ReportUtil::consoleOutput(stderr, "Input is not a live device or raw image, VHD will not be created\n");
             }
         }
 
-        consoleOutput(stdout, "%s - Searching for registry\n", driveToProcess.c_str());
-        SetConsoleTitleA(std::string("Analyzing drive " + driveToProcess + " - Searching for registry").c_str());
-
-        string usersFileName = directoryPath + "/users.txt";
+        ////////////////////////////////////////////////
+        // Enumerate the file and volume systems that we'll need for the various searches
+        TskHelper::getInstance().enumerateFileAndVolumeSystems(img);
 
-        // Enumerate Users with RegistryAnalyzer
-        RegistryAnalyzer registryAnalyzer(usersFileName);
-        registryAnalyzer.analyzeSAMUsers();
+        ////////////////////////////////////////////////////////
+        // do the work
 
-        TskHelper::getInstance().reset();
+        // search for files based on full path
+        searchFilesByFullPath(config, imageShortName);
 
-        if (findFiles.openImageHandle(img)) {
-            tsk_error_print(stderr);
-            consoleOutput(stderr, "Failed to open image\n");
-            handleExit(1);
+        // Get users
+        std::string prefix;
+        if (imgPathArg != NULL) {
+            prefix = "sparse_image";
+        } else {
+            prefix = imageShortName;
         }
+        reportUsers(sessionDir, prefix);
 
-        consoleOutput(stdout, "%s - Searching for files by attribute\n", driveToProcess.c_str());
-        SetConsoleTitleA(std::string("Analyzing drive " + driveToProcess + " - Searching for files by attribute").c_str());
+        // We no longer need the cached files
+        TskHelper::getInstance().reset();
 
-        if (findFiles.findFilesInImg()) {
-            // we already logged the errors in findFiles.handleError()
-            // Don't exit, just let it continue
+        // Full scan of drive for files based on extension, etc.
+        searchFilesByAttribute(config, imageShortName, img);
+
+        if (closeImgNow) {
+            // close the image, if not creating VHD.
+            img->close(img);
         }
     }
 
-    // close alert file before tsk_img_writer_finish, which may take a long time.
-    closeAlert();
+    // close report file before tsk_img_writer_finish, which may take a long time.
+    ReportUtil::closeReport();
 
     // Delayed finialize image write
     for (auto it = std::begin(imgFinalizePending); it != std::end(imgFinalizePending); ++it) {
         TSK_IMG_INFO *img = it->first;
         if (img->itype == TSK_IMG_TYPE_RAW) {
-            if (config->getFinalizeImagerWriter()) {
-                printDebug("finalize image writer for %s", it->second.c_str());
-                consoleOutput(stdout, "Copying remainder of %s\n", it->second.c_str());
+            if (createVHD && config->getFinalizeImagerWriter()) {
+                ReportUtil::printDebug("finalize image writer for %s", it->second.c_str());
+                ReportUtil::consoleOutput(stdout, "Copying remainder of %s\n", it->second.c_str());
                 SetConsoleTitleA(std::string("Copying remainder of " + it->second).c_str());
                 if (tsk_img_writer_finish(img) == TSK_ERR) {
-                    tsk_error_print(stderr);
-                    consoleOutput(stderr, "Error finishing VHD for %s\n", it->second.c_str());
+                    ReportUtil::consoleOutput(stderr, "Error finishing VHD for %s: reason %s\n", it->second.c_str(), tsk_error_get());
                 }
             }
         }
@@ -1383,6 +650,9 @@ main(int argc, char **argv1)
     if (config) {
         delete config;
     }
-    printDebug("Exiting");
-    handleExit(0);
+    if (fileExtractor) {
+        delete fileExtractor;
+    }
+    ReportUtil::printDebug("Exiting");
+    ReportUtil::handleExit(0);
 }
diff --git a/travis_build.sh b/travis_build.sh
deleted file mode 100755
index ed850b2c59d04acfd3dc0c3000013bffad360294..0000000000000000000000000000000000000000
--- a/travis_build.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/sh
-set -ex
-installLib() {
-	wget https://github.com/libyal/$1/releases/download/$2/$1-alpha-$2.tar.gz
-	tar -xzf $1-alpha-$2.tar.gz
-	cd $1-$2
-	if test ${TRAVIS_OS_NAME} = "linux"; then
-		./configure -prefix=/usr > /dev/null
-	else
-		./configure > /dev/null
-	fi
-	make > /dev/null && sudo make install > /dev/null
-	cd ..
-}
-
-if test ${TRAVIS_OS_NAME} = "linux"; then
-	sudo apt-get -qq update
-	sudo apt-get -y install libafflib-dev libewf-dev libpq-dev autopoint libsqlite3-dev ant libcppunit-dev wget openssl
-elif test ${TRAVIS_OS_NAME} = "osx"; then
-	export PATH=${PATH}:/usr/local/opt/gettext/bin
-	brew install ant libewf gettext cppunit afflib openssl
-fi
-installLib libvhdi 20181227
-installLib libvmdk 20181227
-./bootstrap && ./configure --prefix=/usr && make > /dev/null
-cd bindings/java/ && ant -q dist-PostgreSQL
diff --git a/travis_install_libs.sh b/travis_install_libs.sh
new file mode 100755
index 0000000000000000000000000000000000000000..04638d4bcb8dad6e48c1a70e3f560aed11ccc6a1
--- /dev/null
+++ b/travis_install_libs.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+set -ex
+installLib() {
+	wget https://github.com/libyal/$1/releases/download/$2/$1-alpha-$2.tar.gz
+	tar -xzf $1-alpha-$2.tar.gz
+	cd $1-$2
+	if test ${TRAVIS_OS_NAME} = "linux"; then
+		./configure -prefix=/usr > /dev/null
+	else
+		./configure > /dev/null
+	fi
+	make > /dev/null && sudo make install > /dev/null
+	cd ..
+}
+
+installLib libvhdi 20181227
+installLib libvmdk 20181227
+
diff --git a/tsk/Makefile.am b/tsk/Makefile.am
index 9fc16412a3a507a9e82e1c2dd542759eacbdee82..232303ea5f6ea14dc6f533ca8e10f3bca2610ccb 100644
--- a/tsk/Makefile.am
+++ b/tsk/Makefile.am
@@ -8,6 +8,6 @@ libtsk_la_LIBADD = base/libtskbase.la img/libtskimg.la \
     vs/libtskvs.la fs/libtskfs.la hashdb/libtskhashdb.la \
     auto/libtskauto.la pool/libtskpool.la util/libtskutil.la
 # current:revision:age
-libtsk_la_LDFLAGS = -version-info 18:0:5 $(LIBTSK_LDFLAGS)
+libtsk_la_LDFLAGS = -version-info 19:0:0 $(LIBTSK_LDFLAGS)
 
 EXTRA_DIST = tsk_tools_i.h docs/Doxyfile docs/*.dox docs/*.html
diff --git a/tsk/auto/db_postgresql.cpp b/tsk/auto/db_postgresql.cpp
index fd63f4418c4a821f1b0817f20ecebb3094b89368..83653cf7b9b7b2ac695fbc8cbcf5ae3695deed60 100755
--- a/tsk/auto/db_postgresql.cpp
+++ b/tsk/auto/db_postgresql.cpp
@@ -657,20 +657,32 @@ int TskDbPostgreSQL::initialize() {
             "insert into tsk_event_types(event_type_id, display_name, super_type_id) values(7, 'Changed', 1);"
             , "Error initializing tsk_event_types table rows: %s\n") ||
         attempt_exec(
+			/*
+			* Regarding the timeline event tables schema, note that several columns
+			* in the tsk_event_descriptions table seem, at first glance, to be
+			* attributes of events rather than their descriptions and would appear
+			* to belong in tsk_events table instead. The rationale for putting the
+			* data source object ID, content object ID, artifact ID and the flags
+			* indicating whether or not the event source has a hash set hit or is
+			* tagged was motivated by the fact that these attributes are identical
+			* for each event in a set of file system file MAC time events. The
+			* decision was made to avoid duplication and save space by placing this
+			* data in the tsk_event_descriptions table.
+			*/
             "CREATE TABLE tsk_event_descriptions ( "
             " event_description_id BIGSERIAL PRIMARY KEY, "
             " full_description TEXT NOT NULL, "
             " med_description TEXT, "
             " short_description TEXT,"
             " data_source_obj_id BIGINT NOT NULL, "
-            " file_obj_id BIGINT NOT NULL, "
+            " content_obj_id BIGINT NOT NULL, "
             " artifact_id BIGINT, "
             " hash_hit INTEGER NOT NULL, " //boolean 
             " tagged INTEGER NOT NULL, " //boolean 
             " FOREIGN KEY(data_source_obj_id) REFERENCES data_source_info(obj_id), "
-            " FOREIGN KEY(file_obj_id) REFERENCES tsk_objects(obj_id), "
+            " FOREIGN KEY(content_obj_id) REFERENCES tsk_objects(obj_id), "
             " FOREIGN KEY(artifact_id) REFERENCES blackboard_artifacts(artifact_id) ,"
-			" UNIQUE (full_description, file_obj_id, artifact_id))",
+			" UNIQUE (full_description, content_obj_id, artifact_id))",
             "Error creating tsk_event_descriptions table: %s\n")
         ||
         attempt_exec(
@@ -678,7 +690,7 @@ int TskDbPostgreSQL::initialize() {
             " event_id BIGSERIAL PRIMARY KEY, "
             " event_type_id BIGINT NOT NULL REFERENCES tsk_event_types(event_type_id) ,"
             " event_description_id BIGINT NOT NULL REFERENCES tsk_event_descriptions(event_description_id) ,"
-            " time INTEGER NOT NULL , "
+            " time BIGINT NOT NULL , "
 			" UNIQUE (event_type_id, event_description_id, time))"
             , "Error creating tsk_events table: %s\n")
         ||
@@ -751,8 +763,8 @@ int TskDbPostgreSQL::createIndexes() {
         //tsk_events indices
         attempt_exec("CREATE INDEX events_data_source_obj_id  ON tsk_event_descriptions(data_source_obj_id);",
             "Error creating events_data_source_obj_id index on tsk_event_descriptions: %s\n") ||
-        attempt_exec("CREATE INDEX events_file_obj_id  ON tsk_event_descriptions(file_obj_id);",
-            "Error creating events_file_obj_id index on tsk_event_descriptions: %s\n") ||
+        attempt_exec("CREATE INDEX events_content_obj_id  ON tsk_event_descriptions(content_obj_id);",
+            "Error creating events_content_obj_id index on tsk_event_descriptions: %s\n") ||
         attempt_exec("CREATE INDEX events_artifact_id  ON tsk_event_descriptions(artifact_id);",
             "Error creating events_artifact_id index on tsk_event_descriptions: %s\n") ||
         attempt_exec(
@@ -1063,7 +1075,7 @@ int TskDbPostgreSQL::addFsFile(TSK_FS_FILE * fs_file,
 }
 
 
-int TskDbPostgreSQL::addMACTimeEvents(char*& zSQL, const int64_t data_source_obj_id, const int64_t file_obj_id,
+int TskDbPostgreSQL::addMACTimeEvents(char*& zSQL, const int64_t data_source_obj_id, const int64_t content_obj_id,
                                       std::map<int64_t, time_t> timeMap, const char* full_description)
 {
     int64_t event_description_id = -1;
@@ -1071,28 +1083,28 @@ int TskDbPostgreSQL::addMACTimeEvents(char*& zSQL, const int64_t data_source_obj
     //for each  entry (type ->time)
     for (const auto entry : timeMap)
     {
-        const long long time = entry.second;
+        const time_t time = entry.second;
 
 
-        if (time == 0)
+        if (time <= 0)
         {
-            //we skip any MAC time events with time == 0 since 0 is usually a bogus time and not helpfull 
+            //we skip any MAC time events with time == 0 since 0 is usually a bogus time and not helpful. time can't be negative either.
             continue;
         }
         if (event_description_id == -1)
         {
             if (0 > snprintf(zSQL, 2048 - 1,
-                             "INSERT INTO tsk_event_descriptions ( data_source_obj_id, file_obj_id , artifact_id, full_description, hash_hit, tagged) "
+                             "INSERT INTO tsk_event_descriptions ( data_source_obj_id, content_obj_id , artifact_id, full_description, hash_hit, tagged) "
                              " VALUES ("
                              "%" PRId64 "," // data_source_obj_id
-                             "%" PRId64 "," // file_obj_id
+                             "%" PRId64 "," // content_obj_id
                              "NULL," // fixed artifact_id
                              "%s," // full_description
                              "0," // fixed hash_hit
                              "0" // fixed tagged
                              ") RETURNING event_description_id",
                              data_source_obj_id,
-                             file_obj_id,
+                             content_obj_id,
                              full_description))
             {
                 return 1;
@@ -1120,7 +1132,7 @@ int TskDbPostgreSQL::addMACTimeEvents(char*& zSQL, const int64_t data_source_obj
                          "%" PRIu64 ")", // time
                          entry.first,
                          event_description_id,
-                         time))
+						(unsigned long long) time))
         {
             return 1;
         };
diff --git a/tsk/auto/db_sqlite.cpp b/tsk/auto/db_sqlite.cpp
index 1f95b0c67b6dbcc33eb8640f06bb5853244c115c..3f5d1598ad241130992ec531d37e466c85136938 100755
--- a/tsk/auto/db_sqlite.cpp
+++ b/tsk/auto/db_sqlite.cpp
@@ -424,17 +424,29 @@ TskDbSqlite::initialize()
 	        , "Error initializing event_types table rows: %s\n")
 	    ||
 	    attempt_exec(
+			/*
+			* Regarding the timeline event tables schema, note that several columns
+			* in the tsk_event_descriptions table seem, at first glance, to be
+			* attributes of events rather than their descriptions and would appear
+			* to belong in tsk_events table instead. The rationale for putting the
+			* data source object ID, content object ID, artifact ID and the flags
+			* indicating whether or not the event source has a hash set hit or is
+			* tagged was motivated by the fact that these attributes are identical
+			* for each event in a set of file system file MAC time events. The
+			* decision was made to avoid duplication and save space by placing this
+			* data in the tsk_event_descriptions table.
+			*/
 	        "CREATE TABLE tsk_event_descriptions ( "
 	        " event_description_id INTEGER PRIMARY KEY, "
 	        " full_description TEXT NOT NULL, "
 	        " med_description TEXT, "
 	        " short_description TEXT,"
 	        " data_source_obj_id INTEGER NOT NULL REFERENCES data_source_info(obj_id), "
-	        " file_obj_id INTEGER NOT NULL REFERENCES tsk_objects(obj_id), "
+	        " content_obj_id INTEGER NOT NULL REFERENCES tsk_objects(obj_id), "
 	        " artifact_id INTEGER REFERENCES blackboard_artifacts(artifact_id), "
 	        " hash_hit INTEGER NOT NULL, " //boolean 
 	        " tagged INTEGER NOT NULL, " //boolean 
-			" UNIQUE (full_description, file_obj_id, artifact_id))",
+			" UNIQUE (full_description, content_obj_id, artifact_id))",
 	        "Error creating tsk_event_event_types table: %4\n")
 	    ||
 	    attempt_exec(
@@ -442,7 +454,7 @@ TskDbSqlite::initialize()
 	        " event_id INTEGER PRIMARY KEY, "
 	        " event_type_id BIGINT NOT NULL REFERENCES tsk_event_types(event_type_id) ,"
 	        " event_description_id BIGINT NOT NULL REFERENCES tsk_event_descriptions(event_description_id) ,"
-	        " time INTEGER NOT NULL , "
+	        " time BIGINT NOT NULL , "
 			" UNIQUE (event_type_id, event_description_id, time))"
 	        , "Error creating tsk_events table: %s\n")
 
@@ -518,8 +530,8 @@ int TskDbSqlite::createIndexes()
         //events indices
         attempt_exec("CREATE INDEX events_data_source_obj_id  ON tsk_event_descriptions(data_source_obj_id);",
                      "Error creating events_data_source_obj_id index on tsk_event_descriptions: %s\n") ||
-        attempt_exec("CREATE INDEX events_file_obj_id  ON tsk_event_descriptions(file_obj_id);",
-                     "Error creating events_file_obj_id index on tsk_event_descriptions: %s\n") ||
+        attempt_exec("CREATE INDEX events_content_obj_id  ON tsk_event_descriptions(content_obj_id);",
+                     "Error creating events_content_obj_id index on tsk_event_descriptions: %s\n") ||
         attempt_exec("CREATE INDEX events_artifact_id  ON tsk_event_descriptions(artifact_id);",
                      "Error creating events_artifact_id index on tsk_event_descriptions: %s\n") ||
         attempt_exec(
@@ -1000,7 +1012,7 @@ int64_t TskDbSqlite::findParObjId(const TSK_FS_FILE* fs_file, const char* parent
     return parObjId;
 }
 
-int TskDbSqlite::addMACTimeEvents(const int64_t data_source_obj_id, const int64_t file_obj_id,
+int TskDbSqlite::addMACTimeEvents(const int64_t data_source_obj_id, const int64_t content_obj_id,
                                   std::map<int64_t, time_t> timeMap, const char* full_description)
 {
     int64_t event_description_id = -1;
@@ -1008,29 +1020,29 @@ int TskDbSqlite::addMACTimeEvents(const int64_t data_source_obj_id, const int64_
     //for each  entry (type ->time)
     for (const auto entry : timeMap)
     {
-        const long long time = entry.second;
+        const time_t time = entry.second;
 
 
-        if (time == 0)
+        if (time <= 0)
         {
-            //we skip any MAC time events with time == 0 since 0 is usually a bogus time and not helpfull 
+            //we skip any MAC time events with time == 0 since 0 is usually a bogus time and not helpful. time can't be negative either.
             continue;
         }
         if (event_description_id == -1)
         {
             //insert common description for file
             char* descriptionSql = sqlite3_mprintf(
-                "INSERT INTO tsk_event_descriptions ( data_source_obj_id, file_obj_id , artifact_id,  full_description, hash_hit, tagged) "
+                "INSERT INTO tsk_event_descriptions ( data_source_obj_id, content_obj_id , artifact_id,  full_description, hash_hit, tagged) "
                 " VALUES ("
                 "%" PRId64 "," // data_source_obj_id
-                "%" PRId64 "," // file_obj_id
+                "%" PRId64 "," // content_obj_id
                 "NULL," // fixed artifact_id
                 "%Q," // full_description
                 "0," // fixed hash_hit
                 "0" // fixed tagged
                 ")",
                 data_source_obj_id,
-                file_obj_id,
+                content_obj_id,
                 full_description);
 
             if (attempt_exec(descriptionSql,
@@ -1053,7 +1065,7 @@ int TskDbSqlite::addMACTimeEvents(const int64_t data_source_obj_id, const int64_
             "%" PRIu64 ")", // time
             entry.first,
             event_description_id,
-            time
+			(unsigned long long) time
         );
 
         if (attempt_exec(
diff --git a/tsk/base/mymalloc.c b/tsk/base/mymalloc.c
index fa2650b4ccb9bc563f39d48f95707eb460f79b12..f819ef86b5fe2644456e87023cffc21cc5d0a808 100644
--- a/tsk/base/mymalloc.c
+++ b/tsk/base/mymalloc.c
@@ -37,7 +37,7 @@ tsk_malloc(size_t len)
     else {
         memset(ptr, 0, len);
     }
-    return (ptr);
+    return ptr;
 }
 
 /* tsk_realloc - reallocate memory and set error values if needed */
@@ -49,5 +49,5 @@ tsk_realloc(void *ptr, size_t len)
         tsk_error_set_errno(TSK_ERR_AUX_MALLOC);
         tsk_error_set_errstr("tsk_realloc: %s (%" PRIuSIZE" requested)", strerror(errno), len);
     }
-    return (ptr);
+    return ptr;
 }
diff --git a/tsk/base/tsk_base.h b/tsk/base/tsk_base.h
old mode 100755
new mode 100644
index abfda3e1ac01ad122dfe48754e04fede9db56afc..fb3e50d9f8f5f4a323263a9b863b2e28af50cb2c
--- a/tsk/base/tsk_base.h
+++ b/tsk/base/tsk_base.h
@@ -39,11 +39,11 @@
  * 3.1.2b1 would be 0x03010201.  Snapshot from Jan 2, 2003 would be
  * 0xFF030102.
  * See TSK_VERSION_STR for string form. */
-#define TSK_VERSION_NUM 0x040607ff
+#define TSK_VERSION_NUM 0x040700ff
 
 /** Version of code in string form. See TSK_VERSION_NUM for
  * integer form. */
-#define TSK_VERSION_STR "4.6.7"
+#define TSK_VERSION_STR "4.7.0"
 
 
 /* include the TSK-specific header file that we created in autoconf
diff --git a/tsk/fs/ext2fs.c b/tsk/fs/ext2fs.c
index 7950b36471587b96ece60a91037d3d4dfe5aa8af..5a480856e29432030f40a9c6e60d22ed9c6e64cc 100755
--- a/tsk/fs/ext2fs.c
+++ b/tsk/fs/ext2fs.c
@@ -63,7 +63,7 @@ static uint8_t
 test_root(uint32_t a, uint32_t b)
 {
     if (a == 0) {
-        return (b == 0);
+        return b == 0;
     }
     else if (b == 0) {
         return 0;
@@ -81,7 +81,7 @@ test_root(uint32_t a, uint32_t b)
     for (b2 = b; b2 < a; b2 *= b) {}
  
     // was it an exact match?
-    return (b2 == a);
+    return b2 == a;
 }
 
 /** \internal
@@ -3462,5 +3462,5 @@ ext2fs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset,
 
     tsk_init_lock(&ext2fs->lock);
 
-    return (fs);
+    return fs;
 }
diff --git a/tsk/fs/ffs.c b/tsk/fs/ffs.c
index 923dcda75856809a8b06d4a996ca90f0d9179d7f..636b6ec0f1a084eb3dea192a7adb6913879dc42d 100755
--- a/tsk/fs/ffs.c
+++ b/tsk/fs/ffs.c
@@ -2248,5 +2248,5 @@ ffs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset, TSK_FS_TYPE_ENUM ftype, uint
 
     tsk_init_lock(&ffs->lock);
 
-    return (fs);
+    return fs;
 }
diff --git a/tsk/fs/fs_inode.c b/tsk/fs/fs_inode.c
index 0889740209ebe97f500089d6da21f5d93ff5d5fd..2a86897e75e7b5c60baf5169752e9a986e292d0e 100644
--- a/tsk/fs/fs_inode.c
+++ b/tsk/fs/fs_inode.c
@@ -56,7 +56,7 @@ tsk_fs_meta_alloc(size_t a_buf_len)
     // assign the id so we know the structure is still alloc
     fs_meta->tag = TSK_FS_META_TAG;
 
-    return (fs_meta);
+    return fs_meta;
 }
 
 
@@ -84,7 +84,7 @@ tsk_fs_meta_realloc(TSK_FS_META * a_fs_meta, size_t a_buf_len)
             return NULL;
         }
     }
-    return (a_fs_meta);
+    return a_fs_meta;
 }
 
 
diff --git a/tsk/fs/rawfs.c b/tsk/fs/rawfs.c
index c38a09b4660d94d5f3c5dce9708454190f192de9..efec1be45a511b05384ded9c98c70968ccf2ee85 100644
--- a/tsk/fs/rawfs.c
+++ b/tsk/fs/rawfs.c
@@ -99,5 +99,5 @@ rawfs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset)
     fs->jopen = tsk_fs_nofs_jopen;
     fs->journ_inum = 0;
 
-    return (fs);
+    return fs;
 }
diff --git a/tsk/fs/swapfs.c b/tsk/fs/swapfs.c
index 1b6e4c322705eb2e56b98d2eee990d87b8f8b5c7..f316e153e7f1c1fec68a2f65710478345ca862b1 100644
--- a/tsk/fs/swapfs.c
+++ b/tsk/fs/swapfs.c
@@ -97,5 +97,5 @@ swapfs_open(TSK_IMG_INFO * img_info, TSK_OFF_T offset)
     fs->jopen = tsk_fs_nofs_jopen;
     fs->journ_inum = 0;
 
-    return (fs);
+    return fs;
 }
diff --git a/tsk/hashdb/sqlite_hdb.cpp b/tsk/hashdb/sqlite_hdb.cpp
index 5f21f4e522a52c6c299abe0da9714a83d9d59f6f..a0f72196fed6e306d222240da3592fe490f91a4d 100644
--- a/tsk/hashdb/sqlite_hdb.cpp
+++ b/tsk/hashdb/sqlite_hdb.cpp
@@ -256,7 +256,7 @@ uint8_t
         return 0;
     }
     else {
-        return (strncmp(header, SQLITE_FILE_HEADER, strlen(SQLITE_FILE_HEADER)) == 0);
+        return strncmp(header, SQLITE_FILE_HEADER, strlen(SQLITE_FILE_HEADER)) == 0;
     }            
 }
 
diff --git a/tsk/hashdb/tsk_hashdb.c b/tsk/hashdb/tsk_hashdb.c
index fed07db45140791b55fe1e2ce09b3bd35343c88b..9eecd7ef1de1047135f7c9c084f1cd462a9b45f6 100644
--- a/tsk/hashdb/tsk_hashdb.c
+++ b/tsk/hashdb/tsk_hashdb.c
@@ -347,7 +347,7 @@ uint8_t
         return 0;
     }
 
-    return (hdb_info->db_type == TSK_HDB_DBTYPE_IDXONLY_ID);
+    return hdb_info->db_type == TSK_HDB_DBTYPE_IDXONLY_ID;
 }
 
 /**
diff --git a/tsk/img/ewf.cpp b/tsk/img/ewf.cpp
index 2caf6e573100002019c2737ebd2038bc2c7b492b..73ac5f9dc4bb41aae083cef65851b03e963253c7 100755
--- a/tsk/img/ewf.cpp
+++ b/tsk/img/ewf.cpp
@@ -168,10 +168,10 @@ img_file_header_signature_ncmp(const char *filename,
     int fd;
 
     if ((filename == NULL) || (file_header_signature == NULL)) {
-        return (0);
+        return 0;
     }
     if (size_of_signature <= 0) {
-        return (0);
+        return 0;
     }
 
     if ((fd = open(filename, O_RDONLY | O_BINARY)) < 0) {
@@ -192,7 +192,7 @@ img_file_header_signature_ncmp(const char *filename,
 
     match = strncmp(file_header_signature, header, size_of_signature) == 0;
 
-    return (match);
+    return match;
 }
 #endif
 
@@ -409,10 +409,10 @@ ewf_open(int a_num_img,
     }
     ewf_info->md5hash_isset = result;
 
-    int sha1_result = libewf_handle_get_utf8_hash_value_sha1(ewf_info->handle,
+    result = libewf_handle_get_utf8_hash_value_sha1(ewf_info->handle,
         (uint8_t *)ewf_info->sha1hash, 41, &ewf_error);
 
-    if (sha1_result == -1) {
+    if (result == -1) {
         tsk_error_reset();
         tsk_error_set_errno(TSK_ERR_IMG_OPEN);
 
@@ -427,7 +427,7 @@ ewf_open(int a_num_img,
         if (tsk_verbose != 0) {
             tsk_fprintf(stderr, "Error getting SHA1 of EWF file\n");
         }
-        return (NULL);
+        return NULL;
     }
     ewf_info->sha1hash_isset = result;
 
diff --git a/tsk/img/img_writer.cpp b/tsk/img/img_writer.cpp
index a350f9885b56d5136634ce4d3a9379ee842f34c5..2e5a9e42fd99e6f25b5f2811fd4ae9cf695e091d 100755
--- a/tsk/img/img_writer.cpp
+++ b/tsk/img/img_writer.cpp
@@ -395,7 +395,7 @@ static uint32_t generateChecksum(unsigned char * buffer, int len) {
         sum += buffer[i];
     }
 
-    return (~sum);
+    return ~sum;
 }
 
 /*
diff --git a/win32/tsk_logical_imager/tsk_logical_imager.vcxproj b/win32/tsk_logical_imager/tsk_logical_imager.vcxproj
index ce04e5cbccb93c3d1888d0e2c557884354f551f6..a736878eb64f8c29ae49e07628afb71320ce42be 100755
--- a/win32/tsk_logical_imager/tsk_logical_imager.vcxproj
+++ b/win32/tsk_logical_imager/tsk_logical_imager.vcxproj
@@ -444,7 +444,10 @@
     </ProjectReference>
   </ItemGroup>
   <ItemGroup>
+    <ClInclude Include="..\..\tools\logicalimager\DriveUtil.h" />
+    <ClInclude Include="..\..\tools\logicalimager\FileExtractor.h" />
     <ClInclude Include="..\..\tools\logicalimager\LogicalImagerConfiguration.h" />
+    <ClInclude Include="..\..\tools\logicalimager\ReportUtil.h" />
     <ClInclude Include="..\..\tools\logicalimager\UserAccount.h" />
     <ClInclude Include="..\..\tools\logicalimager\json.h" />
     <ClInclude Include="..\..\tools\logicalimager\LogicalImagerDateRule.h" />
@@ -461,13 +464,16 @@
     <ClInclude Include="..\..\tools\logicalimager\RegKey.h" />
     <ClInclude Include="..\..\tools\logicalimager\RegParser.h" />
     <ClInclude Include="..\..\tools\logicalimager\RegVal.h" />
-    <ClInclude Include="..\..\tools\logicalimager\RuleMatchResult.h" />
+    <ClInclude Include="..\..\tools\logicalimager\MatchedRuleInfo.h" />
     <ClInclude Include="..\..\tools\logicalimager\TskFindFiles.h" />
     <ClInclude Include="..\..\tools\logicalimager\TskHelper.h" />
     <ClInclude Include="..\..\tools\logicalimager\Version.h" />
   </ItemGroup>
   <ItemGroup>
+    <ClCompile Include="..\..\tools\logicalimager\DriveUtil.cpp" />
+    <ClCompile Include="..\..\tools\logicalimager\FileExtractor.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\LogicalImagerConfiguration.cpp" />
+    <ClCompile Include="..\..\tools\logicalimager\ReportUtil.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\UserAccount.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\LogicalImagerDateRule.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\LogicalImagerExtensionRule.cpp" />
@@ -482,7 +488,7 @@
     <ClCompile Include="..\..\tools\logicalimager\RegKey.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\RegParser.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\RegVal.cpp" />
-    <ClCompile Include="..\..\tools\logicalimager\RuleMatchResult.cpp" />
+    <ClCompile Include="..\..\tools\logicalimager\MatchedRuleInfo.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\TskFindFiles.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\TskHelper.cpp" />
     <ClCompile Include="..\..\tools\logicalimager\tsk_logical_imager.cpp" />
diff --git a/win32/tsk_logical_imager/tsk_logical_imager.vcxproj.filters b/win32/tsk_logical_imager/tsk_logical_imager.vcxproj.filters
index 226006fdc90f7a6146d21989353c90de02e8fb23..49fdbf3bd93a85ff6945a9c05dc5a498c1094ed3 100755
--- a/win32/tsk_logical_imager/tsk_logical_imager.vcxproj.filters
+++ b/win32/tsk_logical_imager/tsk_logical_imager.vcxproj.filters
@@ -35,9 +35,6 @@
     <ClInclude Include="..\..\tools\logicalimager\LogicalImagerSizeRule.h">
       <Filter>Header Files</Filter>
     </ClInclude>
-    <ClInclude Include="..\..\tools\logicalimager\RuleMatchResult.h">
-      <Filter>Header Files</Filter>
-    </ClInclude>
     <ClInclude Include="..\..\tools\logicalimager\TskFindFiles.h">
       <Filter>Header Files</Filter>
     </ClInclude>
@@ -74,6 +71,18 @@
     <ClInclude Include="..\..\tools\logicalimager\Version.h">
       <Filter>Header Files</Filter>
     </ClInclude>
+    <ClInclude Include="..\..\tools\logicalimager\DriveUtil.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="..\..\tools\logicalimager\ReportUtil.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="..\..\tools\logicalimager\FileExtractor.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="..\..\tools\logicalimager\MatchedRuleInfo.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
   </ItemGroup>
   <ItemGroup>
     <ClCompile Include="..\..\tools\logicalimager\LogicalImagerDateRule.cpp">
@@ -97,9 +106,6 @@
     <ClCompile Include="..\..\tools\logicalimager\LogicalImagerSizeRule.cpp">
       <Filter>Source Files</Filter>
     </ClCompile>
-    <ClCompile Include="..\..\tools\logicalimager\RuleMatchResult.cpp">
-      <Filter>Source Files</Filter>
-    </ClCompile>
     <ClCompile Include="..\..\tools\logicalimager\tsk_logical_imager.cpp">
       <Filter>Source Files</Filter>
     </ClCompile>
@@ -133,5 +139,17 @@
     <ClCompile Include="..\..\tools\logicalimager\LogicalImagerConfiguration.cpp">
       <Filter>Source Files</Filter>
     </ClCompile>
+    <ClCompile Include="..\..\tools\logicalimager\DriveUtil.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="..\..\tools\logicalimager\ReportUtil.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="..\..\tools\logicalimager\FileExtractor.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
+    <ClCompile Include="..\..\tools\logicalimager\MatchedRuleInfo.cpp">
+      <Filter>Source Files</Filter>
+    </ClCompile>
   </ItemGroup>
 </Project>
\ No newline at end of file