diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
index dcb2af3eef7c7c1220fd27a8ac2a7701a2bad6cf..eaaa8f992649d8761e43f647543cad23668ee3ae 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/Case.java
@@ -116,6 +116,7 @@
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
 import org.sleuthkit.datamodel.TskUnsupportedSchemaVersionException;
+import org.sleuthkit.autopsy.coreutils.StopWatch;
 
 /**
  * An Autopsy case. Currently, only one case at a time may be open.
@@ -707,11 +708,15 @@ public static void deleteCurrentCase() throws CaseActionException {
         "Case.exceptionMessage.cannotGetLockToDeleteCase=Cannot delete case because it is open for another user or there is a problem with the coordination service."
     })
     public static void deleteCase(CaseMetadata metadata) throws CaseActionException {
+        StopWatch stopWatch = new StopWatch();
+        stopWatch.start();
         synchronized (caseActionSerializationLock) {
             if (null != currentCase) {
                 throw new CaseActionException(Bundle.Case_exceptionMessage_cannotDeleteCurrentCase());
             }
         }
+        stopWatch.stop();
+        logger.log(Level.INFO, String.format("Used %d s to acquire caseActionSerializationLock (Java monitor in Case class) for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
 
         /*
          * Set up either a GUI progress indicator without a cancel button (can't
@@ -733,10 +738,19 @@ public static void deleteCase(CaseMetadata metadata) throws CaseActionException
                  * cannot be deleted if another node has it open.
                  */
                 progressIndicator.progress(Bundle.Case_progressMessage_checkingForOtherUser());
+                stopWatch.reset();
+                stopWatch.start();
                 try (CoordinationService.Lock dirLock = CoordinationService.getInstance().tryGetExclusiveLock(CategoryNode.CASES, metadata.getCaseDirectory())) {
-                    assert (null != dirLock);
-                    deleteCase(metadata, progressIndicator);
+                    stopWatch.stop();
+                    logger.log(Level.INFO, String.format("Used %d s to acquire case directory coordination service lock for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
+                    if (dirLock != null) {
+                        deleteCase(metadata, progressIndicator);
+                    } else {
+                        throw new CaseActionException(Bundle.Case_creationException_couldNotAcquireDirLock());
+                    }
                 } catch (CoordinationServiceException ex) {
+                    stopWatch.stop();
+                    logger.log(Level.INFO, String.format("Used %d s to fail to acquire case directory coordination service lock for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
                     throw new CaseActionException(Bundle.Case_exceptionMessage_cannotGetLockToDeleteCase(), ex);
                 }
             }
@@ -946,11 +960,13 @@ static Map<Long, String> getImagePaths(SleuthkitCase db) {
         "Case.exceptionMessage.errorsDeletingCase=Errors occured while deleting the case. See the application log for details"
     })
     private static void deleteCase(CaseMetadata metadata, ProgressIndicator progressIndicator) throws CaseActionException {
+        StopWatch stopWatch = new StopWatch();
         boolean errorsOccurred = false;
         if (CaseType.MULTI_USER_CASE == metadata.getCaseType()) {
             /*
              * Delete the case database from the database server.
              */
+            stopWatch.start();
             try {
                 progressIndicator.progress(Bundle.Case_progressMessage_deletingCaseDatabase());
                 CaseDbConnectionInfo db;
@@ -960,10 +976,14 @@ private static void deleteCase(CaseMetadata metadata, ProgressIndicator progress
                         Statement statement = connection.createStatement();) {
                     String deleteCommand = "DROP DATABASE \"" + metadata.getCaseDatabaseName() + "\""; //NON-NLS
                     statement.execute(deleteCommand);
+                    stopWatch.stop();
+                    logger.log(Level.INFO, String.format("Used %d s to delete case database for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
                 }
             } catch (UserPreferencesException | ClassNotFoundException | SQLException ex) {
                 logger.log(Level.SEVERE, String.format("Failed to delete case database %s for %s (%s) in %s", metadata.getCaseDatabaseName(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()), ex);
                 errorsOccurred = true;
+                stopWatch.stop();
+                logger.log(Level.INFO, String.format("Used %d s to fail delete case database for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
             }
         }
 
@@ -973,10 +993,16 @@ private static void deleteCase(CaseMetadata metadata, ProgressIndicator progress
         progressIndicator.progress(Bundle.Case_progressMessage_deletingTextIndex());
         for (KeywordSearchService searchService : Lookup.getDefault().lookupAll(KeywordSearchService.class)) {
             try {
+                stopWatch.reset();
+                stopWatch.start();
                 searchService.deleteTextIndex(metadata);
+                stopWatch.stop();
+                logger.log(Level.INFO, String.format("Used %d s to delete text index for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
             } catch (KeywordSearchServiceException ex) {
                 logger.log(Level.SEVERE, String.format("Failed to delete text index for %s (%s) in %s", metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()), ex);
                 errorsOccurred = true;
+                stopWatch.stop();
+                logger.log(Level.INFO, String.format("Used %d s to fail to delete text index for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
             }
         }
 
@@ -984,9 +1010,16 @@ private static void deleteCase(CaseMetadata metadata, ProgressIndicator progress
          * Delete the case directory.
          */
         progressIndicator.progress(Bundle.Case_progressMessage_deletingCaseDirectory());
+        stopWatch.reset();
+        stopWatch.start();
         if (!FileUtil.deleteDir(new File(metadata.getCaseDirectory()))) {
             logger.log(Level.SEVERE, String.format("Failed to delete case directory for %s (%s) in %s", metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
             errorsOccurred = true;
+            stopWatch.stop();
+            logger.log(Level.INFO, String.format("Used %d s to fail to delete case directory for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
+        } else {
+            stopWatch.stop();
+            logger.log(Level.INFO, String.format("Used %d s to delete case directory for %s (%s) in %s", stopWatch.getElapsedTimeSecs(), metadata.getCaseDisplayName(), metadata.getCaseName(), metadata.getCaseDirectory()));
         }
 
         /*
@@ -1540,11 +1573,13 @@ public void notifyTagDefinitionChanged(String changedTagName) {
     }
 
     /**
-     * Notifies case event subscribers that a central repository comment has been changed.
-     * 
+     * Notifies case event subscribers that a central repository comment has
+     * been changed.
+     *
      * This should not be called from the event dispatch thread (EDT)
-     * 
-     * @param contentId the objectId for the Content which has had its central repo comment changed
+     *
+     * @param contentId  the objectId for the Content which has had its central
+     *                   repo comment changed
      * @param newComment the new value of the comment
      */
     public void notifyCentralRepoCommentChanged(long contentId, String newComment) {
@@ -1800,7 +1835,7 @@ private void open(boolean isNewCase) throws CaseActionException {
                 progressIndicator.progress(Bundle.Case_progressMessage_preparingToOpenCaseResources());
                 acquireSharedCaseDirLock(metadata.getCaseDirectory());
                 try (CoordinationService.Lock resourcesLock = acquireExclusiveCaseResourcesLock(metadata.getCaseDirectory())) {
-                    assert (null != resourcesLock);
+                    assert (resourcesLock != null); // Use the reference to avoid a compile-time warning.
                     open(isNewCase, progressIndicator);
                 } catch (CaseActionException ex) {
                     releaseSharedCaseDirLock(getMetadata().getCaseDirectory());
@@ -2375,7 +2410,7 @@ private void closeAppServiceCaseResources() {
      * @throws CaseActionException with a user-friendly message if the lock
      *                             cannot be acquired.
      */
-    @Messages({"Case.creationException.couldNotAcquireDirLock=Failed to get lock on case directory."})
+    @Messages({"Case.creationException.couldNotAcquireDirLock=Failed to get lock on case directory"})
     private void acquireSharedCaseDirLock(String caseDir) throws CaseActionException {
         try {
             caseDirLock = CoordinationService.getInstance().tryGetSharedLock(CategoryNode.CASES, caseDir, DIR_LOCK_TIMOUT_HOURS, TimeUnit.HOURS);
diff --git a/Core/src/org/sleuthkit/autopsy/casemodule/LocalFilesDSProcessor.java b/Core/src/org/sleuthkit/autopsy/casemodule/LocalFilesDSProcessor.java
index 8d6dcffdd9ffe77f48486ea304ab5b8c9c70c4b2..4d5e3eac36152fb260a42e47347f91f953889301 100644
--- a/Core/src/org/sleuthkit/autopsy/casemodule/LocalFilesDSProcessor.java
+++ b/Core/src/org/sleuthkit/autopsy/casemodule/LocalFilesDSProcessor.java
@@ -369,7 +369,7 @@ public int canProcess(Path dataSourcePath) throws AutoIngestDataSourceProcessorE
     @Override
     public void process(String deviceId, Path dataSourcePath, DataSourceProcessorProgressMonitor progressMonitor, DataSourceProcessorCallback callBack) {
         List<String> filePaths = Arrays.asList(new String[]{dataSourcePath.toString()});
-        run(deviceId, deviceId, filePaths, progressMonitor, callBack);
+        run(deviceId, "", filePaths, progressMonitor, callBack);
     }
 
     /**
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java
index c6301bfb89322770ae7cb15e97ee56a4b48ebecb..0080ade4fbe1cdbb3c227eb02eedf23cb1ca9c04 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/contentviewer/DataContentViewerOtherCases.java
@@ -127,7 +127,7 @@ public void actionPerformed(ActionEvent e) {
                     }
                 } else if (jmi.equals(showCommonalityMenuItem)) {
                     showCommonalityDetails();
-                } 
+                }
             }
         };
 
@@ -419,7 +419,7 @@ private Collection<CorrelationAttributeInstance> getCorrelationAttributesFromNod
         }
 
         // we can correlate based on the MD5 if it is enabled      
-        if (this.file != null && EamDb.isEnabled()) {
+        if (this.file != null && EamDb.isEnabled() && this.file.getSize() > 0) {
             try {
 
                 List<CorrelationAttributeInstance.Type> artifactTypes = EamDb.getInstance().getDefinedCorrelationTypes();
@@ -447,27 +447,23 @@ private Collection<CorrelationAttributeInstance> getCorrelationAttributesFromNod
             } catch (EamDbException | TskCoreException ex) {
                 LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS
             }
-
-        } else {
-
-            // If EamDb not enabled, get the Files default correlation type to allow Other Occurances to be enabled.   
-            if (this.file != null) {
-                String md5 = this.file.getMd5Hash();
-                if (md5 != null && !md5.isEmpty()) {
-                    try {
-                        final CorrelationAttributeInstance.Type fileAttributeType
-                                = CorrelationAttributeInstance.getDefaultCorrelationTypes()
-                                        .stream()
-                                        .filter(attrType -> attrType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID)
-                                        .findAny()
-                                        .get();
-
-                        ret.add(new CorrelationAttributeInstance(fileAttributeType, md5));
-                    } catch (EamDbException ex) {
-                        LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS
-                    } catch (CorrelationAttributeNormalizationException ex) {
-                        LOGGER.log(Level.INFO, String.format("Unable to create CorrelationAttributeInstance for value %s", md5), ex); // NON-NLS
-                    }
+            // If EamDb is not enabled, get the Files default correlation type to allow Other Occurrences to be enabled.
+        } else if (this.file != null && this.file.getSize() > 0) {
+            String md5 = this.file.getMd5Hash();
+            if (md5 != null && !md5.isEmpty()) {
+                try {
+                    final CorrelationAttributeInstance.Type fileAttributeType
+                            = CorrelationAttributeInstance.getDefaultCorrelationTypes()
+                                    .stream()
+                                    .filter(attrType -> attrType.getId() == CorrelationAttributeInstance.FILES_TYPE_ID)
+                                    .findAny()
+                                    .get();
+
+                    ret.add(new CorrelationAttributeInstance(fileAttributeType, md5));
+                } catch (EamDbException ex) {
+                    LOGGER.log(Level.SEVERE, "Error connecting to DB", ex); // NON-NLS
+                } catch (CorrelationAttributeNormalizationException ex) {
+                    LOGGER.log(Level.INFO, String.format("Unable to create CorrelationAttributeInstance for value %s", md5), ex); // NON-NLS
                 }
             }
         }
@@ -515,9 +511,9 @@ private void setEarliestCaseDate() {
      * artifact. If the central repo is not enabled, this will only return files
      * from the current case with matching MD5 hashes.
      *
-     * @param corAttr CorrelationAttribute to query for
+     * @param corAttr        CorrelationAttribute to query for
      * @param dataSourceName Data source to filter results
-     * @param deviceId Device Id to filter results
+     * @param deviceId       Device Id to filter results
      *
      * @return A collection of correlated artifact instances
      */
@@ -580,7 +576,7 @@ private Map<UniquePathKey, OtherOccurrenceNodeInstanceData> getCorrelatedInstanc
      * Get all other abstract files in the current case with the same MD5 as the
      * selected node.
      *
-     * @param corAttr The CorrelationAttribute containing the MD5 to search for
+     * @param corAttr  The CorrelationAttribute containing the MD5 to search for
      * @param openCase The current case
      *
      * @return List of matching AbstractFile objects
@@ -657,11 +653,9 @@ public boolean isSupported(Node node) {
         // - The central repo is disabled and the backing file has a valid MD5 hash
         this.file = this.getAbstractFileFromNode(node);
         if (EamDb.isEnabled()) {
-            return this.file != null
-                    && this.file.getSize() > 0
-                    && !getCorrelationAttributesFromNode(node).isEmpty();
+            return !getCorrelationAttributesFromNode(node).isEmpty();
         } else {
-            return this.file != null
+            return this.file != null
                     && this.file.getSize() > 0
                     && ((this.file.getMd5Hash() != null) && (!this.file.getMd5Hash().isEmpty()));
         }
@@ -733,8 +727,8 @@ private void populateTable(Node node) {
      * Adjust a given column for the text provided.
      *
      * @param columnIndex The index of the column to adjust.
-     * @param text The text whose length will be used to adjust the column
-     * width.
+     * @param text        The text whose length will be used to adjust the
+     *                    column width.
      */
     private void setColumnWidthToText(int columnIndex, String text) {
         TableColumn column = otherCasesTable.getColumnModel().getColumn(columnIndex);
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
index 836f47ea85278e1dc0c0af79d86b8bbc5c405245..e1e87024f00a0c193c5ad8d6df056b6c009ee2f2 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/AbstractSqlEamDb.java
@@ -2960,11 +2960,10 @@ private CorrelationCase getEamCaseFromResultSet(ResultSet resultSet) throws SQLE
                     resultSet.getString("poc_phone"));
         }
 
-        CorrelationCase eamCase = new CorrelationCase(resultSet.getInt("case_id"), resultSet.getString("case_uid"), eamOrg, resultSet.getString("case_name"), 
-                resultSet.getString("creation_date"), resultSet.getString("case_number"), resultSet.getString("examiner_name"), 
+        CorrelationCase eamCase = new CorrelationCase(resultSet.getInt("case_id"), resultSet.getString("case_uid"), eamOrg, resultSet.getString("case_name"),
+                resultSet.getString("creation_date"), resultSet.getString("case_number"), resultSet.getString("examiner_name"),
                 resultSet.getString("examiner_email"), resultSet.getString("examiner_phone"), resultSet.getString("notes"));
 
-
         return eamCase;
     }
 
@@ -3080,6 +3079,7 @@ public void upgradeSchema() throws EamDbException, SQLException {
 
         ResultSet resultSet = null;
         Statement statement = null;
+        PreparedStatement preparedStatement = null;
         Connection conn = null;
         try {
 
@@ -3114,6 +3114,10 @@ public void upgradeSchema() throws EamDbException, SQLException {
                 logger.log(Level.INFO, "Central Repository is up to date");
                 return;
             }
+            if (dbSchemaVersion.compareTo(CURRENT_DB_SCHEMA_VERSION) > 0) {
+                logger.log(Level.INFO, "Central Repository is of newer version than software creates");
+                return;
+            }
 
             // Update from 1.0 to 1.1
             if (dbSchemaVersion.compareTo(new CaseDbSchemaVersionNumber(1, 1)) < 0) {
@@ -3126,7 +3130,56 @@ public void upgradeSchema() throws EamDbException, SQLException {
                 // regardless of whether this succeeds.
                 EamDbUtil.insertDefaultOrganization(conn);
             }
+            //Update to 1.2
+            if (dbSchemaVersion.compareTo(new CaseDbSchemaVersionNumber(1, 2)) < 0) {
+                //update central repository to be able to store new correlation attributes 
+                EamDbPlatformEnum selectedPlatform = EamDbPlatformEnum.getSelectedPlatform();
+                final String addSsidTableTemplate;
+                final String addCaseIdIndexTemplate;
+                final String addDataSourceIdIndexTemplate;
+                final String addValueIndexTemplate;
+                final String addKnownStatusIndexTemplate;
+                final String addAttributeSql;
+                //get the database-specific code for creating a new _instances table
+                switch (selectedPlatform) {
+                    case POSTGRESQL:
+                        addAttributeSql = "INSERT INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?) " + getConflictClause();
+                        addSsidTableTemplate = PostgresEamDbSettings.getCreateArtifactInstancesTableTemplate();
+                        addCaseIdIndexTemplate = PostgresEamDbSettings.getAddCaseIdIndexTemplate();
+                        addDataSourceIdIndexTemplate = PostgresEamDbSettings.getAddDataSourceIdIndexTemplate();
+                        addValueIndexTemplate = PostgresEamDbSettings.getAddValueIndexTemplate();
+                        addKnownStatusIndexTemplate = PostgresEamDbSettings.getAddKnownStatusIndexTemplate();
+                        break;
+                    case SQLITE:
+                        addAttributeSql = "INSERT OR IGNORE INTO correlation_types(id, display_name, db_table_name, supported, enabled) VALUES (?, ?, ?, ?, ?)";
+                        addSsidTableTemplate = SqliteEamDbSettings.getCreateArtifactInstancesTableTemplate();
+                        addCaseIdIndexTemplate = SqliteEamDbSettings.getAddCaseIdIndexTemplate();
+                        addDataSourceIdIndexTemplate = SqliteEamDbSettings.getAddDataSourceIdIndexTemplate();
+                        addValueIndexTemplate = SqliteEamDbSettings.getAddValueIndexTemplate();
+                        addKnownStatusIndexTemplate = SqliteEamDbSettings.getAddKnownStatusIndexTemplate();
+                        break;
+                    default:
+                        throw new EamDbException("Currently selected database platform \"" + selectedPlatform.name() + "\" can not be upgraded.");
+                }
+                final String wirelessNetworksDbTableName = "wireless_networks";
+                final String wirelessNetworksTableInstanceName = wirelessNetworksDbTableName + "_instances";
+                //add the wireless_networks attribute to the correlation_types table
+                preparedStatement = conn.prepareStatement(addAttributeSql);
+                preparedStatement.setInt(1, CorrelationAttributeInstance.SSID_TYPE_ID);
+                preparedStatement.setString(2, Bundle.CorrelationType_SSID_displayName());
+                preparedStatement.setString(3, wirelessNetworksDbTableName);
+                preparedStatement.setInt(4, 1);
+                preparedStatement.setInt(5, 1);
+                preparedStatement.execute();
+
+                //create a new wireless_networks_instances table and add indexes for its columns
+                statement.execute(String.format(addSsidTableTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
+                statement.execute(String.format(addCaseIdIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
+                statement.execute(String.format(addDataSourceIdIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
+                statement.execute(String.format(addValueIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
+                statement.execute(String.format(addKnownStatusIndexTemplate, wirelessNetworksTableInstanceName, wirelessNetworksTableInstanceName));
 
+            }
             if (!updateSchemaVersion(conn)) {
                 throw new EamDbException("Error updating schema version");
             }
@@ -3144,6 +3197,7 @@ public void upgradeSchema() throws EamDbException, SQLException {
             throw ex;
         } finally {
             EamDbUtil.closeResultSet(resultSet);
+            EamDbUtil.closeStatement(preparedStatement);
             EamDbUtil.closeStatement(statement);
             EamDbUtil.closeConnection(conn);
         }
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java
index c26134c5b8aeeb520f0087fe1cbb158d0eda6331..20845ff4472c5ee7edd282580a302b97336030fc 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeInstance.java
@@ -171,7 +171,7 @@ public void setCorrelationType(Type correlationType) {
      * Is this a database instance?
      *
      * @return True if the instance ID is greater or equal to zero; otherwise
-     * false.
+     *         false.
      */
     public boolean isDatabaseInstance() {
         return (ID >= 0);
@@ -234,7 +234,7 @@ public TskData.FileKnown getKnownStatus() {
      * as notable and should never be set to KNOWN.
      *
      * @param knownStatus Should be BAD if the item is tagged as notable,
-     * UNKNOWN otherwise
+     *                    UNKNOWN otherwise
      */
     public void setKnownStatus(TskData.FileKnown knownStatus) {
         this.knownStatus = knownStatus;
@@ -246,18 +246,20 @@ public void setKnownStatus(TskData.FileKnown knownStatus) {
     public static final int EMAIL_TYPE_ID = 2;
     public static final int PHONE_TYPE_ID = 3;
     public static final int USBID_TYPE_ID = 4;
+    public static final int SSID_TYPE_ID = 5;
 
     /**
      * Load the default correlation types
      *
      * @throws EamDbException if the Type's dbTableName has invalid
-     * characters/format
+     *                        characters/format
      */
     @Messages({"CorrelationType.FILES.displayName=Files",
         "CorrelationType.DOMAIN.displayName=Domains",
         "CorrelationType.EMAIL.displayName=Email Addresses",
         "CorrelationType.PHONE.displayName=Phone Numbers",
-        "CorrelationType.USBID.displayName=USB Devices"})
+        "CorrelationType.USBID.displayName=USB Devices",
+        "CorrelationType.SSID.displayName=Wireless Networks"})
     public static List<CorrelationAttributeInstance.Type> getDefaultCorrelationTypes() throws EamDbException {
         List<CorrelationAttributeInstance.Type> DEFAULT_CORRELATION_TYPES = new ArrayList<>();
         DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(FILES_TYPE_ID, Bundle.CorrelationType_FILES_displayName(), "file", true, true)); // NON-NLS
@@ -265,6 +267,7 @@ public static List<CorrelationAttributeInstance.Type> getDefaultCorrelationTypes
         DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(EMAIL_TYPE_ID, Bundle.CorrelationType_EMAIL_displayName(), "email_address", true, true)); // NON-NLS
         DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(PHONE_TYPE_ID, Bundle.CorrelationType_PHONE_displayName(), "phone_number", true, true)); // NON-NLS
         DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(USBID_TYPE_ID, Bundle.CorrelationType_USBID_displayName(), "usb_devices", true, true)); // NON-NLS
+        DEFAULT_CORRELATION_TYPES.add(new CorrelationAttributeInstance.Type(SSID_TYPE_ID, Bundle.CorrelationType_SSID_displayName(), "wireless_networks", true, true)); // NON-NLS
         return DEFAULT_CORRELATION_TYPES;
     }
 
@@ -283,13 +286,14 @@ public static class Type implements Serializable { // NOPMD Avoid short class na
 
         /**
          *
-         * @param typeId Unique ID for this Correlation Type
+         * @param typeId      Unique ID for this Correlation Type
          * @param displayName Name of this type displayed in the UI.
          * @param dbTableName Central repository db table where data of this
-         * type is stored. Must start with a lowercase letter and only contain
-         * lowercase letters, numbers, and '_' characters.
-         * @param supported Is this Type currently supported
-         * @param enabled Is this Type currently enabled.
+         *                    type is stored. Must start with a lowercase letter
+         *                    and only contain lowercase letters, numbers, and
+         *                    '_' characters.
+         * @param supported   Is this Type currently supported
+         * @param enabled     Is this Type currently enabled.
          */
         public Type(int typeId, String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException {
             if (dbTableName == null) {
@@ -312,10 +316,11 @@ public Type(int typeId, String displayName, String dbTableName, Boolean supporte
          *
          * @param displayName Name of this type displayed in the UI.
          * @param dbTableName Central repository db table where data of this
-         * type is stored Must start with a lowercase letter and only contain
-         * lowercase letters, numbers, and '_' characters.
-         * @param supported Is this Type currently supported
-         * @param enabled Is this Type currently enabled.
+         *                    type is stored Must start with a lowercase letter
+         *                    and only contain lowercase letters, numbers, and
+         *                    '_' characters.
+         * @param supported   Is this Type currently supported
+         * @param enabled     Is this Type currently enabled.
          */
         public Type(String displayName, String dbTableName, Boolean supported, Boolean enabled) throws EamDbException {
             this(-1, displayName, dbTableName, supported, enabled);
@@ -477,8 +482,8 @@ public String getDbTableName() {
          * custom_instances)
          *
          * @param dbTableName the dbTableName to set. Must start with lowercase
-         * letter and can only contain lowercase letters, numbers, and '_'
-         * characters.
+         *                    letter and can only contain lowercase letters,
+         *                    numbers, and '_' characters.
          *
          * @throws EamDbException if dbTableName contains invalid characters
          */
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeNormalizer.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeNormalizer.java
index 772e1c517ec89765d4db369ec06df3a9fa8f3379..4ce04769c8d2517501cff2bd130d2cf3bb4374eb 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeNormalizer.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/CorrelationAttributeNormalizer.java
@@ -63,6 +63,8 @@ public static String normalize(CorrelationAttributeInstance.Type attributeType,
                 return normalizePhone(data);
             case CorrelationAttributeInstance.USBID_TYPE_ID:
                 return normalizeUsbId(data);
+            case CorrelationAttributeInstance.SSID_TYPE_ID:
+                return data;
             default:
                 final String errorMessage = String.format(
                         "Validator function not found for attribute type: %s", 
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java
index 30d539e87fb53bf222689ed04ce86693976b6a3c..aca04713457b213e5507415884e2d3f8d10fe5cc 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamArtifactUtil.java
@@ -54,9 +54,9 @@ public static String getEmailAddressAttrString() {
      * EamArtifact with a single EamArtifactInstance within. If not, return
      * null.
      *
-     * @param bbArtifact BlackboardArtifact to examine
+     * @param bbArtifact   BlackboardArtifact to examine
      * @param checkEnabled If true, only create a CorrelationAttribute if it is
-     * enabled
+     *                     enabled
      *
      * @return List of EamArtifacts
      */
@@ -93,10 +93,10 @@ public static List<CorrelationAttributeInstance> makeInstancesFromBlackboardArti
      * based on the data in the blackboard artifact.
      *
      * @param correlationType The Central Repository artifact type to create
-     * @param bbArtifact The blackboard artifact to pull data from
+     * @param bbArtifact      The blackboard artifact to pull data from
      *
      * @return the new EamArtifact, or null if one was not created because
-     * bbArtifact did not contain the needed data
+     *         bbArtifact did not contain the needed data
      */
     private static CorrelationAttributeInstance makeInstanceFromBlackboardArtifact(CorrelationAttributeInstance.Type correlationType,
             BlackboardArtifact bbArtifact) throws EamDbException {
@@ -159,13 +159,14 @@ private static CorrelationAttributeInstance makeInstanceFromBlackboardArtifact(C
                         return null;
                     }
                 }
-
             } else if (correlationType.getId() == CorrelationAttributeInstance.USBID_TYPE_ID
                     && BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_ATTACHED.getTypeID() == artifactTypeID) {
 
                 value = bbArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DEVICE_ID)).getValueString();
+            } else if (correlationType.getId() == CorrelationAttributeInstance.SSID_TYPE_ID
+                    && BlackboardArtifact.ARTIFACT_TYPE.TSK_WIFI_NETWORK.getTypeID() == artifactTypeID) {
+                value = bbArtifact.getAttribute(new BlackboardAttribute.Type(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SSID)).getValueString();
             }
-
         } catch (TskCoreException ex) {
             logger.log(Level.SEVERE, "Error getting attribute while getting type from BlackboardArtifact.", ex); // NON-NLS
             return null;
@@ -185,9 +186,10 @@ private static CorrelationAttributeInstance makeInstanceFromBlackboardArtifact(C
      * Uses the determined type and vallue, then looks up instance details to
      * create proper CorrelationAttributeInstance.
      *
-     * @param bbArtifact the blackboard artifatc
+     * @param bbArtifact      the blackboard artifact
      * @param correlationType the given type
-     * @param value the artifact value
+     * @param value           the artifact value
+     *
      * @return CorrelationAttributeInstance from details
      */
     private static CorrelationAttributeInstance makeCorrelationAttributeInstanceUsingTypeValue(BlackboardArtifact bbArtifact, CorrelationAttributeInstance.Type correlationType, String value) {
@@ -340,7 +342,7 @@ public static CorrelationAttributeInstance makeInstanceFromContent(Content conte
      * @param file The file to test
      *
      * @return true if the file should be added to the central repo, false
-     * otherwise
+     *         otherwise
      */
     public static boolean isSupportedAbstractFileType(AbstractFile file) {
         if (file == null) {
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java
index c7e385928d65fc34a0c00f9e4ce996554b334e03..418181d8ab2e1679554afb337e994c3b1a3dbe9d 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/EamDb.java
@@ -31,9 +31,9 @@
  */
 public interface EamDb {
 
-    public static final int SCHEMA_VERSION = 1;
+    public static final int SCHEMA_VERSION = 2;
     public static final CaseDbSchemaVersionNumber CURRENT_DB_SCHEMA_VERSION
-            = new CaseDbSchemaVersionNumber(1, 1);
+            = new CaseDbSchemaVersionNumber(1, 2);
   
     
     /**
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java
index 77ab8c23db4a9e7e03bca1b0c3cb8ffa9d75f316..6ee454f915ff128ea7f9625674d735b86c106f3f 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/PostgresEamDbSettings.java
@@ -35,8 +35,9 @@
 
 /**
  * Settings for the Postgres implementation of the Central Repository database
- * 
- * NOTE: This is public scope because the options panel calls it directly to set/get
+ *
+ * NOTE: This is public scope because the options panel calls it directly to
+ * set/get
  */
 public final class PostgresEamDbSettings {
 
@@ -266,7 +267,7 @@ public boolean createDatabase() {
         return true;
 
     }
-    
+
     public boolean deleteDatabase() {
         Connection conn = getEphemeralConnection(true);
         if (null == conn) {
@@ -391,26 +392,13 @@ public boolean initializeDatabaseSchema() {
         createCorrelationTypesTable.append("CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)");
         createCorrelationTypesTable.append(")");
 
-        // Each "%s" will be replaced with the relevant TYPE_instances table name.
-        StringBuilder createArtifactInstancesTableTemplate = new StringBuilder();
-        createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
-        createArtifactInstancesTableTemplate.append("id SERIAL PRIMARY KEY,");
-        createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,");
-        createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,");
-        createArtifactInstancesTableTemplate.append("value text NOT NULL,");
-        createArtifactInstancesTableTemplate.append("file_path text NOT NULL,");
-        createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,");
-        createArtifactInstancesTableTemplate.append("comment text,");
-        createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path),");
-        createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,");
-        createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL");
-        createArtifactInstancesTableTemplate.append(")");
+        String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate();
 
-        // Each "%s" will be replaced with the relevant TYPE_instances table name.
-        String instancesIdx1 = "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
-        String instancesIdx2 = "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
-        String instancesIdx3 = "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
-        String instancesIdx4 = "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
+        String instancesIdx1 = getAddCaseIdIndexTemplate();
+        String instancesIdx2 = getAddDataSourceIdIndexTemplate();
+
+        String instancesIdx3 = getAddValueIndexTemplate();
+        String instancesIdx4 = getAddKnownStatusIndexTemplate();
 
         StringBuilder createDbInfoTable = new StringBuilder();
         createDbInfoTable.append("CREATE TABLE IF NOT EXISTS db_info (");
@@ -447,14 +435,14 @@ public boolean initializeDatabaseSchema() {
 
             // Create a separate instance and reference table for each correlation type
             List<CorrelationAttributeInstance.Type> DEFAULT_CORRELATION_TYPES = CorrelationAttributeInstance.getDefaultCorrelationTypes();
-            
+
             String reference_type_dbname;
             String instance_type_dbname;
             for (CorrelationAttributeInstance.Type type : DEFAULT_CORRELATION_TYPES) {
                 reference_type_dbname = EamDbUtil.correlationTypeToReferenceTableName(type);
                 instance_type_dbname = EamDbUtil.correlationTypeToInstanceTableName(type);
-                
-                stmt.execute(String.format(createArtifactInstancesTableTemplate.toString(), instance_type_dbname, instance_type_dbname));
+
+                stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname));
                 stmt.execute(String.format(instancesIdx1, instance_type_dbname, instance_type_dbname));
                 stmt.execute(String.format(instancesIdx2, instance_type_dbname, instance_type_dbname));
                 stmt.execute(String.format(instancesIdx3, instance_type_dbname, instance_type_dbname));
@@ -465,7 +453,7 @@ public boolean initializeDatabaseSchema() {
                     stmt.execute(String.format(createReferenceTypesTableTemplate.toString(), reference_type_dbname, reference_type_dbname));
                     stmt.execute(String.format(referenceTypesIdx1, reference_type_dbname, reference_type_dbname));
                     stmt.execute(String.format(referenceTypesIdx2, reference_type_dbname, reference_type_dbname));
-                }                
+                }
             }
 
         } catch (SQLException ex) {
@@ -480,6 +468,83 @@ public boolean initializeDatabaseSchema() {
         return true;
     }
 
+    /**
+     * Get the template String for creating a new _instances table in a Postgres
+     * central repository. %s will exist in the template where the name of the
+     * new table will be added.
+     *
+     * @return a String which is a template for creating a new _instances table
+     */
+    static String getCreateArtifactInstancesTableTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        StringBuilder createArtifactInstancesTableTemplate = new StringBuilder();
+        createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
+        createArtifactInstancesTableTemplate.append("id SERIAL PRIMARY KEY,");
+        createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,");
+        createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,");
+        createArtifactInstancesTableTemplate.append("value text NOT NULL,");
+        createArtifactInstancesTableTemplate.append("file_path text NOT NULL,");
+        createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,");
+        createArtifactInstancesTableTemplate.append("comment text,");
+        createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique_ UNIQUE (data_source_id, value, file_path),");
+        createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,");
+        createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL");
+        createArtifactInstancesTableTemplate.append(")");
+        return createArtifactInstancesTableTemplate.toString();
+    }
+
+    /**
+     * Get the template for creating an index on the case_id column of an
+     * instance table. %s will exist in the template where the name of the new
+     * table will be added.
+     *
+     * @return a String which is a template for adding an index to the case_id
+     *         column of a _instances table
+     */
+    static String getAddCaseIdIndexTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
+    }
+
+    /**
+     * Get the template for creating an index on the data_source_id column of an
+     * instance table. %s will exist in the template where the name of the new
+     * table will be added.
+     *
+     * @return a String which is a template for adding an index to the
+     *         data_source_id column of a _instances table
+     */
+    static String getAddDataSourceIdIndexTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
+    }
+
+    /**
+     * Get the template for creating an index on the value column of an instance
+     * table. %s will exist in the template where the name of the new table will
+     * be added.
+     *
+     * @return a String which is a template for adding an index to the value
+     *         column of a _instances table
+     */
+    static String getAddValueIndexTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
+    }
+
+    /**
+     * Get the template for creating an index on the known_status column of an
+     * instance table. %s will exist in the template where the name of the new
+     * table will be added.
+     *
+     * @return a String which is a template for adding an index to the
+     *         known_status column of a _instances table
+     */
+    static String getAddKnownStatusIndexTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
+    }
+
     public boolean insertDefaultDatabaseContent() {
         Connection conn = getEphemeralConnection(false);
         if (null == conn) {
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java
index 4894a570e6abdad9e6f81f1b3e23f17ad22bb931..615e49e523551c0bbcaecf5e7f9fc230818e13b5 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/datamodel/SqliteEamDbSettings.java
@@ -35,8 +35,9 @@
 
 /**
  * Settings for the sqlite implementation of the Central Repository database
- * 
- * NOTE: This is public scope because the options panel calls it directly to set/get 
+ *
+ * NOTE: This is public scope because the options panel calls it directly to
+ * set/get
  */
 public final class SqliteEamDbSettings {
 
@@ -95,7 +96,7 @@ public void saveSettings() {
         ModuleSettings.setConfigSetting("CentralRepository", "db.sqlite.dbDirectory", getDbDirectory()); // NON-NLS
         ModuleSettings.setConfigSetting("CentralRepository", "db.sqlite.bulkThreshold", Integer.toString(getBulkThreshold())); // NON-NLS
     }
-    
+
     /**
      * Verify that the db file exists.
      *
@@ -103,11 +104,11 @@ public void saveSettings() {
      */
     public boolean dbFileExists() {
         File dbFile = new File(getFileNameWithPath());
-        if(! dbFile.exists()){
+        if (!dbFile.exists()) {
             return false;
         }
         // It's unlikely, but make sure the file isn't actually a directory
-        return ( ! dbFile.isDirectory());
+        return (!dbFile.isDirectory());
     }
 
     /**
@@ -148,10 +149,11 @@ public boolean createDbDirectory() {
 
         return true;
     }
-    
+
     /**
      * Delete the database
-     * @return 
+     *
+     * @return
      */
     public boolean deleteDatabase() {
         File dbFile = new File(this.getFileNameWithPath());
@@ -333,26 +335,13 @@ public boolean initializeDatabaseSchema() {
         createCorrelationTypesTable.append("CONSTRAINT correlation_types_names UNIQUE (display_name, db_table_name)");
         createCorrelationTypesTable.append(")");
 
-        // Each "%s" will be replaced with the relevant TYPE_instances table name.
-        StringBuilder createArtifactInstancesTableTemplate = new StringBuilder();
-        createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
-        createArtifactInstancesTableTemplate.append("id integer primary key autoincrement NOT NULL,");
-        createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,");
-        createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,");
-        createArtifactInstancesTableTemplate.append("value text NOT NULL,");
-        createArtifactInstancesTableTemplate.append("file_path text NOT NULL,");
-        createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,");
-        createArtifactInstancesTableTemplate.append("comment text,");
-        createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE,");
-        createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,");
-        createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL");
-        createArtifactInstancesTableTemplate.append(")");
+        String createArtifactInstancesTableTemplate = getCreateArtifactInstancesTableTemplate();
 
-        // Each "%s" will be replaced with the relevant TYPE_instances table name.
-        String instancesIdx1 = "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
-        String instancesIdx2 = "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
-        String instancesIdx3 = "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
-        String instancesIdx4 = "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
+        String instancesIdx1 = getAddCaseIdIndexTemplate();
+        String instancesIdx2 = getAddDataSourceIdIndexTemplate();
+
+        String instancesIdx3 = getAddValueIndexTemplate();
+        String instancesIdx4 = getAddKnownStatusIndexTemplate();
 
         StringBuilder createDbInfoTable = new StringBuilder();
         createDbInfoTable.append("CREATE TABLE IF NOT EXISTS db_info (");
@@ -402,7 +391,7 @@ public boolean initializeDatabaseSchema() {
                 reference_type_dbname = EamDbUtil.correlationTypeToReferenceTableName(type);
                 instance_type_dbname = EamDbUtil.correlationTypeToInstanceTableName(type);
 
-                stmt.execute(String.format(createArtifactInstancesTableTemplate.toString(), instance_type_dbname, instance_type_dbname));
+                stmt.execute(String.format(createArtifactInstancesTableTemplate, instance_type_dbname, instance_type_dbname));
                 stmt.execute(String.format(instancesIdx1, instance_type_dbname, instance_type_dbname));
                 stmt.execute(String.format(instancesIdx2, instance_type_dbname, instance_type_dbname));
                 stmt.execute(String.format(instancesIdx3, instance_type_dbname, instance_type_dbname));
@@ -426,6 +415,83 @@ public boolean initializeDatabaseSchema() {
         }
         return true;
     }
+
+    /**
+     * Get the template String for creating a new _instances table in a SQLite
+     * central repository. %s will exist in the template where the name of the
+     * new table will be added.
+     *
+     * @return a String which is a template for creating a new _instances table
+     */
+    static String getCreateArtifactInstancesTableTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        StringBuilder createArtifactInstancesTableTemplate = new StringBuilder();
+        createArtifactInstancesTableTemplate.append("CREATE TABLE IF NOT EXISTS %s (");
+        createArtifactInstancesTableTemplate.append("id integer primary key autoincrement NOT NULL,");
+        createArtifactInstancesTableTemplate.append("case_id integer NOT NULL,");
+        createArtifactInstancesTableTemplate.append("data_source_id integer NOT NULL,");
+        createArtifactInstancesTableTemplate.append("value text NOT NULL,");
+        createArtifactInstancesTableTemplate.append("file_path text NOT NULL,");
+        createArtifactInstancesTableTemplate.append("known_status integer NOT NULL,");
+        createArtifactInstancesTableTemplate.append("comment text,");
+        createArtifactInstancesTableTemplate.append("CONSTRAINT %s_multi_unique UNIQUE(data_source_id, value, file_path) ON CONFLICT IGNORE,");
+        createArtifactInstancesTableTemplate.append("foreign key (case_id) references cases(id) ON UPDATE SET NULL ON DELETE SET NULL,");
+        createArtifactInstancesTableTemplate.append("foreign key (data_source_id) references data_sources(id) ON UPDATE SET NULL ON DELETE SET NULL");
+        createArtifactInstancesTableTemplate.append(")");
+        return createArtifactInstancesTableTemplate.toString();
+    }
+
+    /**
+     * Get the template for creating an index on the case_id column of an
+     * instance table. %s will exist in the template where the name of the new
+     * table will be added.
+     *
+     * @return a String which is a template for adding an index to the case_id
+     *         column of a _instances table
+     */
+    static String getAddCaseIdIndexTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        return "CREATE INDEX IF NOT EXISTS %s_case_id ON %s (case_id)";
+    }
+
+    /**
+     * Get the template for creating an index on the data_source_id column of an
+     * instance table. %s will exist in the template where the name of the new
+     * table will be added.
+     *
+     * @return a String which is a template for adding an index to the
+     *         data_source_id column of a _instances table
+     */
+    static String getAddDataSourceIdIndexTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        return "CREATE INDEX IF NOT EXISTS %s_data_source_id ON %s (data_source_id)";
+    }
+
+    /**
+     * Get the template for creating an index on the value column of an instance
+     * table. %s will exist in the template where the name of the new table will
+     * be added.
+     *
+     * @return a String which is a template for adding an index to the value
+     *         column of a _instances table
+     */
+    static String getAddValueIndexTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        return "CREATE INDEX IF NOT EXISTS %s_value ON %s (value)";
+    }
+
+    /**
+     * Get the template for creating an index on the known_status column of an
+     * instance table. %s will exist in the template where the name of the new
+     * table will be added.
+     *
+     * @return a String which is a template for adding an index to the
+     *         known_status column of a _instances table
+     */
+    static String getAddKnownStatusIndexTemplate() {
+        // Each "%s" will be replaced with the relevant TYPE_instances table name.
+        return "CREATE INDEX IF NOT EXISTS %s_value_known_status ON %s (value, known_status)";
+    }
 
     public boolean insertDefaultDatabaseContent() {
         Connection conn = getEphemeralConnection();
@@ -490,8 +556,6 @@ void setBulkThreshold(int bulkThreshold) throws EamDbException {
         }
     }
 
-
-
     /**
      * @return the dbDirectory
      */
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java
index ccca659574281964d3b3a7b19f44891367126751..2f2a35ab2ac50d56cab1ad36f90e2815f8c97b79 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/eventlisteners/IngestEventsListener.java
@@ -183,7 +183,10 @@ private class IngestModuleEventListener implements PropertyChangeListener {
 
         @Override
         public void propertyChange(PropertyChangeEvent evt) {
-            if (getCeModuleInstanceCount() > 0) {
+            //if ingest is running, only process the event when a Correlation Engine module is running
+            //sometimes artifacts are generated by DSPs or other sources while ingest is not running
+            //in these cases we still want to create correlation attributes for those artifacts when appropriate
+            if (!IngestManager.getInstance().isIngestRunning() || getCeModuleInstanceCount() > 0) {
                 EamDb dbManager;
                 try {
                     dbManager = EamDb.getInstance();
@@ -193,7 +196,9 @@ public void propertyChange(PropertyChangeEvent evt) {
                 }
                 switch (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName())) {
                     case DATA_ADDED: {
-                        jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, isFlagNotableItems()));
+                        //If ingest isn't running, always flag notable items; otherwise use the ingest module setting to decide whether to create interesting item artifacts.
+                        boolean flagNotable = !IngestManager.getInstance().isIngestRunning() || isFlagNotableItems();
+                        jobProcessingExecutor.submit(new DataAddedTask(dbManager, evt, flagNotable));
                         break;
                     }
                 }
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestModule.java b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestModule.java
index 21baf59454711b47f48227ce03d3e8efe47541ea..7469e38bd0e2d6879e2aa2c4fb54b3813184fca9 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/ingestmodule/IngestModule.java
@@ -71,7 +71,6 @@ final class IngestModule implements FileIngestModule {
     private CorrelationDataSource eamDataSource;
     private Blackboard blackboard;
     private CorrelationAttributeInstance.Type filesType;
-
     private final boolean flagTaggedNotableItems;
 
     /**
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties
index 0fc19515932f0ecbac0f63346ed150f8a45866a6..aae9fa321db4998b2449f64db9c29f12a6347461 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties
@@ -34,7 +34,7 @@ AddNewOrganizationDialog.bnOK.text=OK
 AddNewOrganizationDialog.tfName.tooltip=POC Name
 ManageTagsDialog.okButton.text=OK
 ManageTagsDialog.cancelButton.text=Cancel
-ManageArtifactTypesDialog.taInstructionsMsg.text=Enable one or more correlation properties to use for correlation during ingest. Note, these properties are global and impact all users of the central repository.
+ManageArtifactTypesDialog.taInstructionsMsg.text=Enable one or more correlation properties to use for correlation during ingest. Note, these properties are global and impact all users of the Central Repository.
 EamSqliteSettingsDialog.bnOk.text=OK
 EamPostgresSettingsDialog.bnSave.text=Save
 EamDbSettingsDialog.bnDatabasePathFileOpen.text=Browse...
@@ -58,11 +58,10 @@ ManageCorrelationPropertiesDialog.okButton.text=OK
 GlobalSettingsPanel.bnManageProperties.text=Manage Correlation Properties
 EamDbSettingsDialog.lbDatabaseDesc.text=Database File:
 EamDbSettingsDialog.lbFullDbPath.text=
-GlobalSettingsPanel.cbUseCentralRepo.text=Use a central repository
-GlobalSettingsPanel.correlationPropertiesTextArea.text=Choose which file and result properties to store in the central repository for later correlation.\n
-GlobalSettingsPanel.organizationTextArea.text=Organization information can be tracked in the central repository.
+GlobalSettingsPanel.cbUseCentralRepo.text=Use a Central Repository
+GlobalSettingsPanel.organizationTextArea.text=Organization information can be tracked in the Central Repository.
 GlobalSettingsPanel.manageOrganizationButton.text=Manage Organizations
-GlobalSettingsPanel.lbCentralRepository.text=A central repository allows you to correlate files and results between cases.
+GlobalSettingsPanel.lbCentralRepository.text=A Central Repository allows you to correlate files and results between cases.
 GlobalSettingsPanel.pnCorrelationProperties.border.title=Correlation Properties
 GlobalSettingsPanel.organizationPanel.border.title=Organizations
 GlobalSettingsPanel.casesPanel.border.title=Case Details
@@ -74,8 +73,9 @@ ShowCasesDialog.caseDetailsTable.toolTipText=Click column name to sort. Right-cl
 ShowCasesDialog.title=Case Details
 GlobalSettingsPanel.Case\ Details.AccessibleContext.accessibleName=Cases Details
 ShowCasesDialog.caseDetailsTable.AccessibleContext.accessibleDescription=Click column name to sort.
-GlobalSettingsPanel.casesTextArea.text=Display table that lists central repository case details.
-GlobalSettingsPanel.ingestRunningWarningLabel.text=Cannot make changes to central repository settings when ingest is running!
+GlobalSettingsPanel.casesTextArea.text=Display table that lists Central Repository case details.
+GlobalSettingsPanel.ingestRunningWarningLabel.text=Cannot make changes to Central Repository settings when ingest is running!
+GlobalSettingsPanel.correlationPropertiesTextArea.text=Choose which file and result properties to store in the Central Repository for later correlation.\n
 ManageCasesDialog.examinerPhoneLabel.text=Examiner Phone:
 ManageCasesDialog.examinerNameLabel.text=Examiner Name:
 ManageCasesDialog.examinerEmailLabel.text=Examiner Email:
@@ -84,4 +84,4 @@ ManageCasesDialog.orgLabel.text=Organization:
 ManageCasesDialog.closeButton.text=Close
 ManageCasesDialog.notesLabel.text=Notes:
 ManageCasesDialog.dataSourcesLabel.text=Data Sources:
-ManageCasesDialog.caseInfoLabel.text=Case Info:
+ManageCasesDialog.caseInfoLabel.text=Case Info:
\ No newline at end of file
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/EamDbSettingsDialog.java b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/EamDbSettingsDialog.java
index c504f666e48602bda4d2487e497247913e7773e4..826e66ecbc0d65e822eb4a524d863239af817035 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/EamDbSettingsDialog.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/EamDbSettingsDialog.java
@@ -102,7 +102,7 @@ public boolean accept(File pathname) {
 
             @Override
             public String getDescription() {
-                return "Directories and central repository databases";
+                return "Directories and Central Repository databases";
             }
         });
         cbDatabaseType.setSelectedItem(selectedPlatform);
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.form b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.form
index c3a8f678d608f20eafcd2a4f7d0ebe84def20e36..0a8c7dcc64e9ad384e2b9b1dff56d7c78c295681 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.form
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.form
@@ -57,7 +57,7 @@
           <Layout>
             <DimensionLayout dim="0">
               <Group type="103" groupAlignment="0" attributes="0">
-                  <Component id="lbCentralRepository" pref="1022" max="32767" attributes="0"/>
+                  <Component id="lbCentralRepository" max="32767" attributes="0"/>
                   <Group type="102" attributes="0">
                       <Group type="103" groupAlignment="0" attributes="0">
                           <Component id="pnDatabaseConfiguration" alignment="0" max="32767" attributes="0"/>
@@ -67,7 +67,7 @@
                           <Group type="102" attributes="0">
                               <Component id="cbUseCentralRepo" min="-2" pref="162" max="-2" attributes="0"/>
                               <EmptySpace type="unrelated" max="-2" attributes="0"/>
-                              <Component id="ingestRunningWarningLabel" max="32767" attributes="0"/>
+                              <Component id="ingestRunningWarningLabel" pref="844" max="32767" attributes="0"/>
                           </Group>
                           <Group type="102" alignment="0" attributes="0">
                               <EmptySpace max="-2" attributes="0"/>
@@ -89,13 +89,13 @@
                       </Group>
                       <EmptySpace type="unrelated" max="-2" attributes="0"/>
                       <Component id="pnDatabaseConfiguration" min="-2" max="-2" attributes="0"/>
-                      <EmptySpace max="-2" attributes="0"/>
+                      <EmptySpace min="0" pref="0" max="-2" attributes="0"/>
                       <Component id="pnCorrelationProperties" min="-2" max="-2" attributes="0"/>
-                      <EmptySpace max="-2" attributes="0"/>
+                      <EmptySpace min="0" pref="0" max="-2" attributes="0"/>
                       <Component id="organizationPanel" min="-2" max="-2" attributes="0"/>
-                      <EmptySpace max="-2" attributes="0"/>
+                      <EmptySpace min="0" pref="0" max="-2" attributes="0"/>
                       <Component id="casesPanel" min="-2" max="-2" attributes="0"/>
-                      <EmptySpace max="-2" attributes="0"/>
+                      <EmptySpace min="0" pref="0" max="-2" attributes="0"/>
                       <Component id="tbOops" min="-2" max="-2" attributes="0"/>
                       <EmptySpace max="-2" attributes="0"/>
                   </Group>
@@ -242,14 +242,14 @@
               <Layout>
                 <DimensionLayout dim="0">
                   <Group type="103" groupAlignment="0" attributes="0">
-                      <Group type="102" attributes="0">
-                          <EmptySpace max="-2" attributes="0"/>
+                      <Group type="102" alignment="0" attributes="0">
+                          <EmptySpace min="-2" max="-2" attributes="0"/>
                           <Group type="103" groupAlignment="0" attributes="0">
-                              <Component id="correlationPropertiesScrollPane" max="32767" attributes="0"/>
-                              <Group type="102" alignment="0" attributes="0">
+                              <Group type="102" attributes="0">
                                   <Component id="bnManageTypes" min="-2" max="-2" attributes="0"/>
                                   <EmptySpace min="0" pref="0" max="32767" attributes="0"/>
                               </Group>
+                              <Component id="correlationPropertiesScrollPane" max="32767" attributes="0"/>
                           </Group>
                           <EmptySpace max="-2" attributes="0"/>
                       </Group>
@@ -258,8 +258,7 @@
                 <DimensionLayout dim="1">
                   <Group type="103" groupAlignment="0" attributes="0">
                       <Group type="102" alignment="1" attributes="0">
-                          <EmptySpace min="-2" pref="7" max="-2" attributes="0"/>
-                          <Component id="correlationPropertiesScrollPane" min="-2" max="-2" attributes="0"/>
+                          <Component id="correlationPropertiesScrollPane" pref="32" max="32767" attributes="0"/>
                           <EmptySpace max="-2" attributes="0"/>
                           <Component id="bnManageTypes" min="-2" max="-2" attributes="0"/>
                           <EmptySpace min="-2" pref="8" max="-2" attributes="0"/>
@@ -301,7 +300,7 @@
                           <Font name="Tahoma" size="11" style="0"/>
                         </Property>
                         <Property name="lineWrap" type="boolean" value="true"/>
-                        <Property name="rows" type="int" value="2"/>
+                        <Property name="rows" type="int" value="1"/>
                         <Property name="text" type="java.lang.String" editor="org.netbeans.modules.i18n.form.FormI18nStringEditor">
                           <ResourceString bundle="org/sleuthkit/autopsy/centralrepository/optionspanel/Bundle.properties" key="GlobalSettingsPanel.correlationPropertiesTextArea.text" replaceFormat="org.openide.util.NbBundle.getMessage({sourceFileName}.class, &quot;{key}&quot;)"/>
                         </Property>
@@ -334,7 +333,7 @@
                       <Group type="102" attributes="0">
                           <EmptySpace max="-2" attributes="0"/>
                           <Group type="103" groupAlignment="0" attributes="0">
-                              <Component id="organizationScrollPane" pref="992" max="32767" attributes="0"/>
+                              <Component id="organizationScrollPane" max="32767" attributes="0"/>
                               <Group type="102" attributes="0">
                                   <Component id="manageOrganizationButton" min="-2" max="-2" attributes="0"/>
                                   <EmptySpace min="0" pref="0" max="32767" attributes="0"/>
diff --git a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.java b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.java
index e24fe70ea8d551f7699fe142decbc96b6356a56e..a4d6ef5cc0e1dc06f455cd4abbed75890a7d2b13 100644
--- a/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/centralrepository/optionspanel/GlobalSettingsPanel.java
@@ -222,7 +222,7 @@ public void actionPerformed(java.awt.event.ActionEvent evt) {
         correlationPropertiesTextArea.setColumns(20);
         correlationPropertiesTextArea.setFont(new java.awt.Font("Tahoma", 0, 11)); // NOI18N
         correlationPropertiesTextArea.setLineWrap(true);
-        correlationPropertiesTextArea.setRows(2);
+        correlationPropertiesTextArea.setRows(1);
         correlationPropertiesTextArea.setText(org.openide.util.NbBundle.getMessage(GlobalSettingsPanel.class, "GlobalSettingsPanel.correlationPropertiesTextArea.text")); // NOI18N
         correlationPropertiesTextArea.setToolTipText("");
         correlationPropertiesTextArea.setWrapStyleWord(true);
@@ -236,17 +236,16 @@ public void actionPerformed(java.awt.event.ActionEvent evt) {
             .addGroup(pnCorrelationPropertiesLayout.createSequentialGroup()
                 .addContainerGap()
                 .addGroup(pnCorrelationPropertiesLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
-                    .addComponent(correlationPropertiesScrollPane)
                     .addGroup(pnCorrelationPropertiesLayout.createSequentialGroup()
                         .addComponent(bnManageTypes)
-                        .addGap(0, 0, Short.MAX_VALUE)))
+                        .addGap(0, 0, Short.MAX_VALUE))
+                    .addComponent(correlationPropertiesScrollPane))
                 .addContainerGap())
         );
         pnCorrelationPropertiesLayout.setVerticalGroup(
             pnCorrelationPropertiesLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
             .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, pnCorrelationPropertiesLayout.createSequentialGroup()
-                .addGap(7, 7, 7)
-                .addComponent(correlationPropertiesScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
+                .addComponent(correlationPropertiesScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 32, Short.MAX_VALUE)
                 .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                 .addComponent(bnManageTypes)
                 .addGap(8, 8, 8))
@@ -281,7 +280,7 @@ public void actionPerformed(java.awt.event.ActionEvent evt) {
             .addGroup(organizationPanelLayout.createSequentialGroup()
                 .addContainerGap()
                 .addGroup(organizationPanelLayout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
-                    .addComponent(organizationScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 992, Short.MAX_VALUE)
+                    .addComponent(organizationScrollPane)
                     .addGroup(organizationPanelLayout.createSequentialGroup()
                         .addComponent(manageOrganizationButton)
                         .addGap(0, 0, Short.MAX_VALUE)))
@@ -356,7 +355,7 @@ public void actionPerformed(java.awt.event.ActionEvent evt) {
         jPanel1.setLayout(jPanel1Layout);
         jPanel1Layout.setHorizontalGroup(
             jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
-            .addComponent(lbCentralRepository, javax.swing.GroupLayout.DEFAULT_SIZE, 1022, Short.MAX_VALUE)
+            .addComponent(lbCentralRepository, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
             .addGroup(jPanel1Layout.createSequentialGroup()
                 .addGroup(jPanel1Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                     .addComponent(pnDatabaseConfiguration, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
@@ -366,7 +365,7 @@ public void actionPerformed(java.awt.event.ActionEvent evt) {
                     .addGroup(jPanel1Layout.createSequentialGroup()
                         .addComponent(cbUseCentralRepo, javax.swing.GroupLayout.PREFERRED_SIZE, 162, javax.swing.GroupLayout.PREFERRED_SIZE)
                         .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
-                        .addComponent(ingestRunningWarningLabel, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
+                        .addComponent(ingestRunningWarningLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 844, Short.MAX_VALUE))
                     .addGroup(jPanel1Layout.createSequentialGroup()
                         .addContainerGap()
                         .addComponent(tbOops, javax.swing.GroupLayout.PREFERRED_SIZE, 974, javax.swing.GroupLayout.PREFERRED_SIZE)))
@@ -382,13 +381,13 @@ public void actionPerformed(java.awt.event.ActionEvent evt) {
                     .addComponent(ingestRunningWarningLabel))
                 .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                 .addComponent(pnDatabaseConfiguration, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
-                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
+                .addGap(0, 0, 0)
                 .addComponent(pnCorrelationProperties, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
-                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
+                .addGap(0, 0, 0)
                 .addComponent(organizationPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
-                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
+                .addGap(0, 0, 0)
                 .addComponent(casesPanel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
-                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
+                .addGap(0, 0, 0)
                 .addComponent(tbOops, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                 .addContainerGap())
         );
diff --git a/Core/src/org/sleuthkit/autopsy/contentviewers/PListViewer.java b/Core/src/org/sleuthkit/autopsy/contentviewers/PListViewer.java
index 506084f0c3f251d8ffada69c1244f38575b9cfe6..dd73b6f6ca762b4ce1b91bfe9c1bc9af2f76b255 100644
--- a/Core/src/org/sleuthkit/autopsy/contentviewers/PListViewer.java
+++ b/Core/src/org/sleuthkit/autopsy/contentviewers/PListViewer.java
@@ -74,7 +74,7 @@ class PListViewer extends javax.swing.JPanel implements FileTypeViewer, Explorer
     private final Outline outline;
     private ExplorerManager explorerManager;
 
-    private NSDictionary rootDict;
+    private NSObject rootDict;
 
     /**
      * Creates new form PListViewer
@@ -415,22 +415,35 @@ private PropKeyValue parseProperty(final String key, final NSObject value) {
     }
 
     /**
-     * Parses given binary stream and extracts Plist key/value
+     * Parses given binary stream and extracts Plist key/value.
      *
-     * @param plistbytes
+     * @param plistbytes The byte array containing the Plist data.
      *
      * @return list of PropKeyValue
      */
     private List<PropKeyValue> parsePList(final byte[] plistbytes) throws IOException, PropertyListFormatException, ParseException, ParserConfigurationException, SAXException {
 
         final List<PropKeyValue> plist = new ArrayList<>();
-        rootDict = (NSDictionary) PropertyListParser.parse(plistbytes);
+        rootDict = PropertyListParser.parse(plistbytes);
 
-        final String[] keys = rootDict.allKeys();
-        for (final String key : keys) {
-            final PropKeyValue pkv = parseProperty(key, rootDict.objectForKey(key));
-            if (null != pkv) {
-                plist.add(pkv);
+        /*
+         * Parse the data if the root is an NSArray or NSDictionary. Anything
+         * else is unexpected and will be ignored.
+         */
+        if (rootDict instanceof NSArray) {
+            for (int i = 0; i < ((NSArray) rootDict).count(); i++) {
+                final PropKeyValue pkv = parseProperty("", ((NSArray) rootDict).objectAtIndex(i));
+                if (null != pkv) {
+                    plist.add(pkv);
+                }
+            }
+        } else if (rootDict instanceof NSDictionary) {
+            final String[] keys = ((NSDictionary) rootDict).allKeys();
+            for (final String key : keys) {
+                final PropKeyValue pkv = parseProperty(key, ((NSDictionary) rootDict).objectForKey(key));
+                if (null != pkv) {
+                    plist.add(pkv);
+                }
             }
         }
 
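
Reviewer note: the reason rootDict was widened from NSDictionary to NSObject is that a plist's root element may be an array rather than a dictionary. Below is a small standalone sketch using the same dd-plist calls as above; the XML snippets are hypothetical sample data.

import com.dd.plist.NSArray;
import com.dd.plist.NSDictionary;
import com.dd.plist.NSObject;
import com.dd.plist.PropertyListParser;
import java.nio.charset.StandardCharsets;

final class PListRootDemo {

    public static void main(String[] args) throws Exception {
        // A dictionary-rooted plist (the common case) ...
        String dictXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
                + "<plist version=\"1.0\"><dict><key>Name</key><string>demo</string></dict></plist>";
        // ... and an array-rooted plist, which the previous cast to NSDictionary could not handle.
        String arrayXml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
                + "<plist version=\"1.0\"><array><string>a</string><string>b</string></array></plist>";
        for (String xml : new String[]{dictXml, arrayXml}) {
            NSObject root = PropertyListParser.parse(xml.getBytes(StandardCharsets.UTF_8));
            if (root instanceof NSDictionary) {
                System.out.println("dictionary root, keys: " + ((NSDictionary) root).allKeys().length);
            } else if (root instanceof NSArray) {
                System.out.println("array root, count: " + ((NSArray) root).count());
            }
        }
    }
}
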
diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerArtifact.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerArtifact.java
index 2cccd31428ca5380088be351861d62c3bbc91370..39d3203b13fb9b334c6b0b35aadb4b79b2e40e4f 100644
--- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerArtifact.java
+++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataContentViewerArtifact.java
@@ -487,7 +487,8 @@ public int isPreferred(Node node) {
                 || (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID())
                 || (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_INTERESTING_FILE_HIT.getTypeID())
                 || (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_OBJECT_DETECTED.getTypeID())
-                || (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID())) {        
+                || (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID())
+                || (artifact.getArtifactTypeID() == ARTIFACT_TYPE.TSK_EXT_MISMATCH_DETECTED.getTypeID())) {
             return 3;
         } else {
             return 6;
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/NetworkUtils.java b/Core/src/org/sleuthkit/autopsy/coreutils/NetworkUtils.java
index af41bc1980be3bd1a21f9e4ef56191013dcbff1c..78547f8370eead294b88975701f7115cecf517a8 100644
--- a/Core/src/org/sleuthkit/autopsy/coreutils/NetworkUtils.java
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/NetworkUtils.java
@@ -1,7 +1,7 @@
 /*
  * Autopsy Forensic Browser
  *
- * Copyright 2012-2015 Basis Technology Corp.
+ * Copyright 2012-2018 Basis Technology Corp.
  * Contact: carrier <at> sleuthkit <dot> org
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
@@ -18,14 +18,22 @@
  */
 package org.sleuthkit.autopsy.coreutils;
 
+import java.net.MalformedURLException;
+import java.net.URL;
 import java.net.UnknownHostException;
+import java.util.StringTokenizer;
 
 public class NetworkUtils {
+    
+    private NetworkUtils() {
+    }
 
     /**
      * Set the host name variable. Sometimes the network can be finicky, so the
      * answer returned by getHostName() could throw an exception or be null.
      * Have it read the environment variable if getHostName() is unsuccessful.
+     * 
+     * @return the local host name
      */
     public static String getLocalHostName() {
         String hostName = "";
@@ -41,4 +49,78 @@ public static String getLocalHostName() {
         }
         return hostName;
     }
+    
+    /**
+     * Attempt to manually extract the domain from a URL.
+     *
+     * @param url The URL to extract the domain from.
+     *
+     * @return The domain, or an empty string if no domain could be found.
+     */
+    private static String getBaseDomain(String url) {
+        String host = null;
+        
+        //strip protocol
+        String cleanUrl = url.replaceFirst(".*:\\/\\/", "");
+
+        //strip after slashes
+        String dirToks[] = cleanUrl.split("\\/");
+        if (dirToks.length > 0) {
+            host = dirToks[0];
+        } else {
+            host = cleanUrl;
+        }
+
+        //get the domain part from the host (keep at most the last two dot-separated tokens)
+        StringTokenizer tok = new StringTokenizer(host, ".");
+        StringBuilder hostB = new StringBuilder();
+        int toks = tok.countTokens();
+
+        for (int count = 0; count < toks; ++count) {
+            String part = tok.nextToken();
+            int diff = toks - count;
+            if (diff < 3) {
+                hostB.append(part);
+            }
+            if (diff == 2) {
+                hostB.append(".");
+            }
+        }
+
+        String base = hostB.toString();
+        // verify there are no special characters in there
+        if (base.matches(".*[~`!@#$%^&\\*\\(\\)\\+={}\\[\\];:\\?<>,/ ].*")) {
+            return "";
+        }
+        return base;
+    }
+
+    /**
+     * Attempt to extract the domain from a URL. Will start by using the
+     * built-in URL class, and if that fails will try to extract it manually.
+     *
+     * @param urlString The URL to extract the domain from.
+     *
+     * @return The domain, or an empty string if no domain name was found.
+     */
+    public static String extractDomain(String urlString) {
+        if (urlString == null) {
+            return "";
+        }
+        String result = "";
+
+        try {
+            URL url = new URL(urlString);
+            result = url.getHost();
+        } catch (MalformedURLException ex) {
+            //do not log if not a valid URL - we will try to extract it ourselves
+        }
+
+        //was not a valid URL, try a less picky method
+        if (result == null || result.trim().isEmpty()) {
+            return getBaseDomain(urlString);
+        }
+        return result;
+    }
+    
 }
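
Reviewer note: a minimal usage sketch of the new helper follows. The expected values in the comments are illustrative; they assume the java.net.URL behavior and the last-two-tokens fallback described above and are not taken from the project's tests.

import org.sleuthkit.autopsy.coreutils.NetworkUtils;

final class ExtractDomainDemo {

    public static void main(String[] args) {
        // Well-formed URL: java.net.URL parses it, so getHost() is returned as-is.
        System.out.println(NetworkUtils.extractDomain("http://www.example.com/index.html")); // www.example.com
        // No protocol: URL parsing fails, so the manual fallback keeps the last
        // two dot-separated tokens of the host portion.
        System.out.println(NetworkUtils.extractDomain("www.example.com/downloads/file.zip")); // example.com
        // Null input is tolerated and yields an empty string.
        System.out.println(NetworkUtils.extractDomain(null)); // ""
    }
}
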
diff --git a/Core/src/org/sleuthkit/autopsy/coreutils/TimeZoneUtils.java b/Core/src/org/sleuthkit/autopsy/coreutils/TimeZoneUtils.java
index ae9f1a7758346279289fb2b7ad4a245ae460c965..c844d9ce97b9c6e4326e805ff7fe93b88e465bc8 100644
--- a/Core/src/org/sleuthkit/autopsy/coreutils/TimeZoneUtils.java
+++ b/Core/src/org/sleuthkit/autopsy/coreutils/TimeZoneUtils.java
@@ -21,6 +21,8 @@
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
 import java.util.GregorianCalendar;
 import java.util.List;
 import java.util.SimpleTimeZone;
@@ -45,7 +47,7 @@ public static String convertToAlphaNumericFormat(String timeZoneId) {
         java.util.TimeZone zone = java.util.TimeZone.getTimeZone(timeZoneId);
         int offset = zone.getRawOffset() / 1000;
         int hour = offset / 3600;
-        int min = (offset % 3600) / 60;
+        int min = Math.abs((offset % 3600) / 60);
 
         DateFormat dfm = new SimpleDateFormat("z");
         dfm.setTimeZone(zone);
@@ -74,7 +76,7 @@ public static String convertToAlphaNumericFormat(String timeZoneId) {
     public static String createTimeZoneString(TimeZone timeZone) {
         int offset = timeZone.getRawOffset() / 1000;
         int hour = offset / 3600;
-        int minutes = (offset % 3600) / 60;
+        int minutes = Math.abs((offset % 3600) / 60);
         
         return String.format("(GMT%+d:%02d) %s", hour, minutes, timeZone.getID()); //NON-NLS
     }
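
Reviewer note: a quick demonstration of why Math.abs is needed for the minutes component. This assumes the JDK time zone database includes America/St_Johns (raw offset -12600000 ms, UTC-03:30); the values in the comments follow from Java's truncating integer division.

import java.util.TimeZone;

final class OffsetFormatDemo {

    public static void main(String[] args) {
        TimeZone zone = TimeZone.getTimeZone("America/St_Johns");
        int offset = zone.getRawOffset() / 1000;         // -12600
        int hour = offset / 3600;                        // -3
        int oldMinutes = (offset % 3600) / 60;           // -30 (previous behavior)
        int newMinutes = Math.abs((offset % 3600) / 60); // 30
        System.out.println(String.format("(GMT%+d:%02d)", hour, oldMinutes)); // (GMT-3:-30)
        System.out.println(String.format("(GMT%+d:%02d)", hour, newMinutes)); // (GMT-3:30)
    }
}
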
@@ -83,9 +85,11 @@ public static String createTimeZoneString(TimeZone timeZone) {
      * Generates a list of time zones.
      */
     public static List<String> createTimeZoneList() {
-        List<String> timeZoneList = new ArrayList<>();
+        /*
+         * Create a list of time zones.
+         */
+        List<TimeZone> timeZoneList = new ArrayList<>();
         
-        // load and add all timezone
         String[] ids = SimpleTimeZone.getAvailableIDs();
         for (String id : ids) {
             /*
@@ -97,10 +101,36 @@ public static List<String> createTimeZoneList() {
              * if(hasDaylight){ result = result + second; }
              * timeZoneComboBox.addItem(item + " (" + result + ")");
              */
-            timeZoneList.add(createTimeZoneString(TimeZone.getTimeZone(id)));
+            timeZoneList.add(TimeZone.getTimeZone(id));
         }
         
-        return timeZoneList;
+        /*
+         * Sort the list of time zones first by offset, then by ID.
+         */
+        Collections.sort(timeZoneList, new Comparator<TimeZone>(){
+            @Override
+            public int compare(TimeZone o1, TimeZone o2){
+                int offsetDelta = Integer.compare(o1.getRawOffset(), o2.getRawOffset());
+                
+                if (offsetDelta == 0) {
+                    return o1.getID().compareToIgnoreCase(o2.getID());
+                }
+                
+                return offsetDelta;
+            }
+        });
+        
+        /*
+         * Create a list of Strings encompassing both the GMT offset and the
+         * time zone ID.
+         */
+        List<String> outputList = new ArrayList<>();
+        
+        for (TimeZone timeZone : timeZoneList) {
+            outputList.add(createTimeZoneString(timeZone));
+        }
+        
+        return outputList;
     }
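
Reviewer note: purely as a style observation, the anonymous Comparator above could be written with the Java 8 comparator combinators; the sketch below shows the equivalent ordering (raw offset first, then case-insensitive ID).

import java.util.Comparator;
import java.util.TimeZone;

final class TimeZoneOrdering {

    // Same ordering as the anonymous Comparator above.
    static final Comparator<TimeZone> BY_OFFSET_THEN_ID =
            Comparator.comparingInt(TimeZone::getRawOffset)
                    .thenComparing(TimeZone::getID, String.CASE_INSENSITIVE_ORDER);
}
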
 
     /**
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java b/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java
index 3ab5f6cb90d17432e7f36e107ce56f8a491b5c41..e950d0e870ad241784f11948b04befa09e84f8ea 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/ExtractedContent.java
@@ -156,6 +156,8 @@ static String getIconFilePath(int typeID) {
             return filePath + "drive_network.png"; //NON-NLS
         } else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_FACE_DETECTED.getTypeID()) {
             return filePath + "face.png"; //NON-NLS
+        } else if (typeID == BlackboardArtifact.ARTIFACT_TYPE.TSK_WIFI_NETWORK.getTypeID()) {
+            return filePath + "network-wifi.png"; //NON-NLS
         }
         return filePath + "artifact-icon.png"; //NON-NLS
     }
diff --git a/Core/src/org/sleuthkit/autopsy/images/network-wifi.png b/Core/src/org/sleuthkit/autopsy/images/network-wifi.png
new file mode 100644
index 0000000000000000000000000000000000000000..5d7cb76ca165db10f3df4f8b07813eee8b335a7c
Binary files /dev/null and b/Core/src/org/sleuthkit/autopsy/images/network-wifi.png differ
diff --git a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
index 39e428d233e0eebc494009aefadd0fc42aad3311..c1aec6c81bae0f828601dd0738a939dfe45ef3c1 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/embeddedfileextractor/SevenZipExtractor.java
@@ -21,9 +21,6 @@
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.OutputStream;
-import java.nio.file.Files;
-import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -93,7 +90,7 @@ class SevenZipExtractor {
     private String moduleDirAbsolute;
 
     private Blackboard blackboard;
-    
+
     private ProgressHandle progress;
     private int numItems;
     private String currentArchiveName;
@@ -164,18 +161,19 @@ boolean isSevenZipExtractionSupported(AbstractFile file) {
      *
      * More heuristics to be added here
      *
-     * @param archiveFile     the AbstractFile for the parent archive which
-     *                        which we are checking
-     * @param inArchive       The SevenZip archive currently open for extraction
-     * 
-     * @param inArchiveItemIndex Index of item inside the SevenZip archive. Each 
-     *                           file inside an archive is associated with a unique 
-     *                           integer
-     * 
-     * @param depthMap        a concurrent hashmap which keeps track of the
-     *                        depth of all nested archives, key of objectID
-     * @param escapedFilePath the path to the archiveFileItem which has been
-     *                        escaped
+     * @param archiveFile        the AbstractFile for the parent archive which
+     *                           which we are checking
+     * @param inArchive          The SevenZip archive currently open for
+     *                           extraction
+     *
+     * @param inArchiveItemIndex Index of item inside the SevenZip archive. Each
+     *                           file inside an archive is associated with a
+     *                           unique integer
+     *
+     * @param depthMap           a concurrent hashmap which keeps track of the
+     *                           depth of all nested archives, key of objectID
+     * @param escapedFilePath    the path to the archiveFileItem which has been
+     *                           escaped
      *
      * @return true if potential zip bomb, false otherwise
      */
@@ -551,7 +549,7 @@ boolean unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthM
             numItems = inArchive.getNumberOfItems();
             progress.start(numItems);
             progressStarted = true;
-            
+
             //setup the archive local root folder
             final String uniqueArchiveFileName = FileUtil.escapeFileName(EmbeddedFileExtractorIngestModule.getUniqueName(archiveFile));
             try {
@@ -605,7 +603,7 @@ boolean unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthM
                         inArchiveItemIndex, PropID.SIZE);
                 if (freeDiskSpace != IngestMonitor.DISK_FREE_SPACE_UNKNOWN && archiveItemSize != null && archiveItemSize > 0) { //if free space is known and file is not empty.
                     String archiveItemPath = (String) inArchive.getProperty(
-                        inArchiveItemIndex, PropID.PATH);
+                            inArchiveItemIndex, PropID.PATH);
                     long newDiskSpace = freeDiskSpace - archiveItemSize;
                     if (newDiskSpace < MIN_FREE_DISK_SPACE) {
                         String msg = NbBundle.getMessage(SevenZipExtractor.class,
@@ -677,7 +675,7 @@ boolean unpack(AbstractFile archiveFile, ConcurrentHashMap<Long, Archive> depthM
             inArchive.extract(extractionIndices, false, archiveCallBack);
 
             unpackSuccessful = unpackSuccessful & archiveCallBack.wasSuccessful();
-            
+
             archiveDetailsMap = null;
 
             // add them to the DB. We wait until the end so that we have the metadata on all of the
@@ -795,140 +793,57 @@ private int[] getExtractableFilesFromDetailsMap(
                 .mapToInt(Integer::intValue)
                 .toArray();
     }
-
+    
     /**
-     * Stream used to unpack the archive to local file
+     * UnpackStream used by the SevenZip bindings to do archive extraction. A
+     * memory leak in the SevenZip library prevents the streams from being
+     * released until the entire archive extraction is complete. Instead of
+     * creating a new UnpackStream for every file in the archive, we rebase the
+     * EncodedFileOutputStream to point at each new file.
      */
-    private abstract static class UnpackStream implements ISequentialOutStream {
+    private final static class UnpackStream implements ISequentialOutStream {
 
-        private OutputStream output;
+        private EncodedFileOutputStream output;
         private String localAbsPath;
-
-        UnpackStream(String localAbsPath) {
+        private int bytesWritten;
+        
+        UnpackStream(String localAbsPath) throws IOException {
+            this.output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1);
             this.localAbsPath = localAbsPath;
-            try {
-                output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1);
-            } catch (IOException ex) {
-                logger.log(Level.SEVERE, "Error writing extracted file: " + localAbsPath, ex); //NON-NLS
-            }
-
-        }
-
-        public abstract long getSize();
-
-        OutputStream getOutput() {
-            return output;
-        }
-
-        String getLocalAbsPath() {
-            return localAbsPath;
-        }
-
-        public void close() {
-            if (output != null) {
-                try {
-                    output.flush();
-                    output.close();
-                    output = null;
-                } catch (IOException e) {
-                    logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS
-                }
-            }
-        }
-    }
-
-    /**
-     * Stream used to unpack the archive of unknown size to local file
-     */
-    private static class UnknownSizeUnpackStream extends UnpackStream {
-
-        private long freeDiskSpace;
-        private boolean outOfSpace = false;
-        private long bytesWritten = 0;
-
-        UnknownSizeUnpackStream(String localAbsPath, long freeDiskSpace) {
-            super(localAbsPath);
-            this.freeDiskSpace = freeDiskSpace;
+            this.bytesWritten = 0;
+        } 
+        
+        public void setNewOutputStream(String localAbsPath) throws IOException {
+            this.output.close();
+            this.output = new EncodedFileOutputStream(new FileOutputStream(localAbsPath), TskData.EncodingType.XOR1);
+            this.localAbsPath = localAbsPath;
+            this.bytesWritten = 0;
         }
-
-        @Override
-        public long getSize() {
-            return this.bytesWritten;
+        
+        public int getSize() {
+            return bytesWritten;
         }
-
+        
         @Override
         public int write(byte[] bytes) throws SevenZipException {
             try {
-                // If the content size is unknown, cautiously write to disk.
-                // Write only if byte array is less than 80% of the current
-                // free disk space.
-                if (freeDiskSpace == IngestMonitor.DISK_FREE_SPACE_UNKNOWN || bytes.length < 0.8 * freeDiskSpace) {
-                    getOutput().write(bytes);
-                    // NOTE: this method is called multiple times for a
-                    // single extractSlow() call. Update bytesWritten and
-                    // freeDiskSpace after every write operation.
-                    this.bytesWritten += bytes.length;
-                    this.freeDiskSpace -= bytes.length;
-                } else {
-                    this.outOfSpace = true;
-                    logger.log(Level.INFO, NbBundle.getMessage(
-                            SevenZipExtractor.class,
-                            "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg"));
-                    throw new SevenZipException(
-                            NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.noSpace.msg"));
-                }
+                output.write(bytes);
+                this.bytesWritten += bytes.length;
             } catch (IOException ex) {
                 throw new SevenZipException(
-                        NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
-                                getLocalAbsPath()), ex);
+                    NbBundle.getMessage(SevenZipExtractor.class, 
+                            "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
+                            localAbsPath), ex);
             }
             return bytes.length;
         }
-
-        @Override
-        public void close() {
-            if (getOutput() != null) {
-                try {
-                    getOutput().flush();
-                    getOutput().close();
-                    if (this.outOfSpace) {
-                        Files.delete(Paths.get(getLocalAbsPath()));
-                    }
-                } catch (IOException e) {
-                    logger.log(Level.SEVERE, "Error closing unpack stream for file: {0}", getLocalAbsPath()); //NON-NLS
-                }
-            }
-        }
-    }
-
-    /**
-     * Stream used to unpack the archive of known size to local file
-     */
-    private static class KnownSizeUnpackStream extends UnpackStream {
-
-        private long size;
-
-        KnownSizeUnpackStream(String localAbsPath, long size) {
-            super(localAbsPath);
-            this.size = size;
-        }
-
-        @Override
-        public long getSize() {
-            return this.size;
-        }
-
-        @Override
-        public int write(byte[] bytes) throws SevenZipException {
-            try {
-                getOutput().write(bytes);
-            } catch (IOException ex) {
-                throw new SevenZipException(
-                        NbBundle.getMessage(SevenZipExtractor.class, "EmbeddedFileExtractorIngestModule.ArchiveExtractor.UnpackStream.write.exception.msg",
-                                getLocalAbsPath()), ex);
-            }
-            return bytes.length;
+        
+        public void close() throws IOException {
+           try(EncodedFileOutputStream out = output) {
+               out.flush();
+           }
         }
+        
     }
 
     /**
@@ -973,9 +888,8 @@ private static class StandardIArchiveExtractCallback
         private UnpackStream unpackStream = null;
         private final Map<Integer, InArchiveItemDetails> archiveDetailsMap;
         private final ProgressHandle progressHandle;
-        
+
         private int inArchiveItemIndex;
-        private final long freeDiskSpace;
 
         private long createTimeInSeconds;
         private long modTimeInSeconds;
@@ -992,7 +906,6 @@ public StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
                 String password, long freeDiskSpace) {
 
             this.inArchive = inArchive;
-            this.freeDiskSpace = freeDiskSpace;
             this.progressHandle = progressHandle;
             this.archiveFile = archiveFile;
             this.archiveDetailsMap = archiveDetailsMap;
@@ -1000,19 +913,21 @@ public StandardIArchiveExtractCallback(ISevenZipInArchive inArchive,
         }
 
         /**
-         * Get stream is called by the internal framework as it traverses 
-         * the archive structure. The ISequentialOutStream is where the 
-         * archive file contents will be expanded and written to the local disk.
-         * 
+         * Get stream is called by the internal framework as it traverses the
+         * archive structure. The ISequentialOutStream is where the archive file
+         * contents will be expanded and written to the local disk.
+         *
          * Skips folders, as there is nothing to extract.
-         * 
-         * @param inArchiveItemIndex current location of the 
-         * @param mode Will always be EXTRACT
+         *
+         * @param inArchiveItemIndex current location of the
+         * @param mode               Will always be EXTRACT
+         *
          * @return
-         * @throws SevenZipException 
+         *
+         * @throws SevenZipException
          */
         @Override
-        public ISequentialOutStream getStream(int inArchiveItemIndex, 
+        public ISequentialOutStream getStream(int inArchiveItemIndex,
                 ExtractAskMode mode) throws SevenZipException {
 
             this.inArchiveItemIndex = inArchiveItemIndex;
@@ -1023,28 +938,36 @@ public ISequentialOutStream getStream(int inArchiveItemIndex,
                 return null;
             }
 
-            final Long archiveItemSize = (Long) inArchive.getProperty(
-                    inArchiveItemIndex, PropID.SIZE);
             final String localAbsPath = archiveDetailsMap.get(
                     inArchiveItemIndex).getLocalAbsPath();
-
-            if (archiveItemSize != null) {
-                unpackStream = new SevenZipExtractor.KnownSizeUnpackStream(
-                        localAbsPath, archiveItemSize);
-            } else {
-                unpackStream = new SevenZipExtractor.UnknownSizeUnpackStream(
-                        localAbsPath, freeDiskSpace);
+            
+            //If the UnpackStream has already been allocated, point its output stream
+            //at the next file rather than creating a new unpack stream. The 7Zip
+            //binding has a memory leak, so newly created unpack streams are never
+            //dereferenced. As a fix, we create one UnpackStream and mutate its state,
+            //so that only one 8192 byte buffer exists in memory per archive.
+            try {
+                if (unpackStream != null) {
+                    unpackStream.setNewOutputStream(localAbsPath);
+                } else {
+                    unpackStream = new UnpackStream(localAbsPath);
+                }
+            } catch (IOException ex) {
+                logger.log(Level.WARNING, String.format("Error opening or setting new stream " //NON-NLS
+                        + "for archive file at %s", localAbsPath), ex); //NON-NLS
+                return null;
             }
 
             return unpackStream;
         }
 
         /**
-         * Retrieves the file metadata from the archive before extraction. 
+         * Retrieves the file metadata from the archive before extraction.
          * Called after getStream.
-         * 
+         *
          * @param mode Will always be EXTRACT.
-         * @throws SevenZipException 
+         *
+         * @throws SevenZipException
          */
         @Override
         public void prepareOperation(ExtractAskMode mode) throws SevenZipException {
@@ -1061,18 +984,18 @@ public void prepareOperation(ExtractAskMode mode) throws SevenZipException {
                     : writeTime.getTime() / 1000;
             accessTimeInSeconds = accessTime == null ? 0L
                     : accessTime.getTime() / 1000;
-            
+
             progressHandle.progress(archiveFile.getName() + ": "
                     + (String) inArchive.getProperty(inArchiveItemIndex, PropID.PATH),
                     inArchiveItemIndex);
-            
+
         }
 
         /**
          * Updates the unpackedNode data in the tree after the archive has been
-         * expanded to local disk. 
+         * expanded to local disk.
          *
-         * @param result - ExtractOperationResult 
+         * @param result - ExtractOperationResult
          *
          * @throws SevenZipException
          */
@@ -1089,7 +1012,7 @@ public void setOperationResult(ExtractOperationResult result) throws SevenZipExc
                         localRelPath);
                 return;
             }
-            
+
             final String localAbsPath = archiveDetailsMap.get(
                     inArchiveItemIndex).getLocalAbsPath();
             if (result != ExtractOperationResult.OK) {
@@ -1103,7 +1026,11 @@ public void setOperationResult(ExtractOperationResult result) throws SevenZipExc
                     !(Boolean) inArchive.getProperty(inArchiveItemIndex, PropID.IS_FOLDER),
                     0L, createTimeInSeconds, accessTimeInSeconds, modTimeInSeconds, localRelPath);
 
-            unpackStream.close();
+            try {
+                unpackStream.close();
+            } catch (IOException e) {
+                logger.log(Level.WARNING, "Error closing unpack stream for file: {0}", localAbsPath); //NON-NLS
+            }
         }
 
         @Override
@@ -1214,9 +1141,9 @@ private UnpackedNode addNode(UnpackedNode parent, List<String> tokenPath) {
          */
         List<AbstractFile> getRootFileObjects() {
             List<AbstractFile> ret = new ArrayList<>();
-            for (UnpackedNode child : rootNode.getChildren()) {
+            rootNode.getChildren().forEach((child) -> {
                 ret.add(child.getFile());
-            }
+            });
             return ret;
         }
 
@@ -1228,17 +1155,17 @@ List<AbstractFile> getRootFileObjects() {
          */
         List<AbstractFile> getAllFileObjects() {
             List<AbstractFile> ret = new ArrayList<>();
-            for (UnpackedNode child : rootNode.getChildren()) {
+            rootNode.getChildren().forEach((child) -> {
                 getAllFileObjectsRec(ret, child);
-            }
+            });
             return ret;
         }
 
         private void getAllFileObjectsRec(List<AbstractFile> list, UnpackedNode parent) {
             list.add(parent.getFile());
-            for (UnpackedNode child : parent.getChildren()) {
+            parent.getChildren().forEach((child) -> {
                 getAllFileObjectsRec(list, child);
-            }
+            });
         }
 
         /**
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties
index c461fc193f4c039a7ce27b5898d4a9bce229391e..11ace044fbccbadffc7e88a1290855d467570a61 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/Bundle.properties
@@ -91,7 +91,7 @@ HashDbImportDatabaseDialog.errorMessage.failedToOpenHashDbMsg=Failed to open has
 HashLookupModuleFactory.moduleName.text=Hash Lookup
 HashLookupModuleFactory.moduleDescription.text=Identifies known and notable files using supplied hash sets, such as a standard NSRL hash set.
 HashDbIngestModule.fileReadErrorMsg=Read Error\: {0}
-HashDbIngestModule.calcHashValueErr=Error encountered while calculating the hash value for {0}.
+HashDbIngestModule.calcHashValueErr=Error encountered while calculating the hash value for {0} ({1}).
 HashDbIngestModule.hashLookupErrorMsg=Hash Lookup Error\: {0}
 HashDbIngestModule.settingKnownBadStateErr=Error encountered while setting notable state for {0}.
 HashDbIngestModule.lookingUpKnownBadHashValueErr=Error encountered while looking up notable hash value for {0}.
diff --git a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
index ee3ba1c08912e5a9312cb95a64f630b2b366732d..23749b8cba9b6acb94e7bd3992973f923ca9a2ce 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/hashdatabase/HashDbIngestModule.java
@@ -229,7 +229,9 @@ public ProcessResult process(AbstractFile file) {
                 services.postMessage(IngestMessage.createErrorMessage(
                         HashLookupModuleFactory.getModuleName(),
                         NbBundle.getMessage(this.getClass(), "HashDbIngestModule.fileReadErrorMsg", name),
-                        NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr", name)));
+                        NbBundle.getMessage(this.getClass(), "HashDbIngestModule.calcHashValueErr", 
+                                file.getParentPath() + file.getName(), 
+                                file.isMetaFlagSet(TskData.TSK_FS_META_FLAG_ENUM.ALLOC) ? "Allocated File" : "Deleted File")));
                 return ProcessResult.ERROR;
             }
         }
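
Reviewer note: for reference, the updated calcHashValueErr bundle key expands as shown below. NbBundle.getMessage applies MessageFormat-style substitution; the file path in this sketch is hypothetical.

import java.text.MessageFormat;

final class CalcHashMsgDemo {

    public static void main(String[] args) {
        String pattern = "Error encountered while calculating the hash value for {0} ({1}).";
        System.out.println(MessageFormat.format(pattern,
                "/img_case1/vol_vol2/Users/jdoe/file.dat", "Deleted File"));
        // Error encountered while calculating the hash value for /img_case1/vol_vol2/Users/jdoe/file.dat (Deleted File).
    }
}
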
diff --git a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetRulePanel.java b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetRulePanel.java
index c67d83080615cc076e9a13439ac810c373e223cf..7f8285c4a1126d1ea408872d05a4d042b0a7c89c 100644
--- a/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetRulePanel.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/interestingitems/FilesSetRulePanel.java
@@ -471,7 +471,7 @@ FilesSet.Rule.FileNameCondition getFileNameCondition() throws IllegalStateExcept
         if (!this.nameTextField.getText().isEmpty()) {
             if (this.nameRegexCheckbox.isSelected()) {
                 try {
-                    Pattern pattern = Pattern.compile(this.nameTextField.getText());
+                    Pattern pattern = Pattern.compile(this.nameTextField.getText(), Pattern.CASE_INSENSITIVE);
                     if (this.fullNameRadioButton.isSelected()) {
                         condition = new FilesSet.Rule.FullNameCondition(pattern);
                     } else {
@@ -556,7 +556,7 @@ FilesSet.Rule.ParentPathCondition getPathCondition() throws IllegalStateExceptio
         if (!this.pathTextField.getText().isEmpty()) {
             if (this.pathRegexCheckBox.isSelected()) {
                 try {
-                    condition = new FilesSet.Rule.ParentPathCondition(Pattern.compile(this.pathTextField.getText()));
+                    condition = new FilesSet.Rule.ParentPathCondition(Pattern.compile(this.pathTextField.getText(), Pattern.CASE_INSENSITIVE));
                 } catch (PatternSyntaxException ex) {
                     logger.log(Level.SEVERE, "Attempt to get malformed path condition", ex); // NON-NLS
                     throw new IllegalStateException("The files set rule panel path condition is not in a valid state"); // NON-NLS
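
Reviewer note: a small sketch of what adding Pattern.CASE_INSENSITIVE changes for these rule conditions; the pattern and file name are hypothetical examples.

import java.util.regex.Pattern;

final class CaseInsensitiveRuleDemo {

    public static void main(String[] args) {
        String fileName = "REPORT.PDF";
        // Without the flag, the rule only matches lower-case names.
        System.out.println(Pattern.compile("\\.pdf$").matcher(fileName).find()); // false
        // With CASE_INSENSITIVE, the same rule matches regardless of case.
        System.out.println(Pattern.compile("\\.pdf$", Pattern.CASE_INSENSITIVE).matcher(fileName).find()); // true
    }
}
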
diff --git a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
index 81c4c9ac728f5db1c49a6088d02fc2bcfe4cbcec..13a507865836cd09144e3da1368023fe59591bb9 100644
--- a/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
+++ b/Core/src/org/sleuthkit/autopsy/report/ReportHTML.java
@@ -271,6 +271,9 @@ private String useDataTypeIcon(String dataType) {
                 case TSK_ACCOUNT:
                     in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/accounts.png"); //NON-NLS
                     break;
+                case TSK_WIFI_NETWORK:
+                    in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/network-wifi.png"); //NON-NLS
+                    break;
                 default:
                     logger.log(Level.WARNING, "useDataTypeIcon: unhandled artifact type = {0}", dataType); //NON-NLS
                     in = getClass().getResourceAsStream("/org/sleuthkit/autopsy/report/images/star.png"); //NON-NLS
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java
index e62c58343c05f8360db46c0960068fa08c707d87..2275b63d5f3b5a83114bb8ddcc7c2526eff903e3 100644
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java
+++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AddArchiveTask.java
@@ -41,6 +41,7 @@
 import org.sleuthkit.autopsy.datasourceprocessors.AutoIngestDataSourceProcessor;
 import org.sleuthkit.autopsy.coreutils.TimeStampUtils;
 import org.sleuthkit.datamodel.Content;
+import org.sleuthkit.datamodel.DataSource;
 
 /*
  * A runnable that adds an archive data source as well as data sources contained
@@ -195,9 +196,18 @@ public void run() {
                             continue;
                         }
 
-                        // if we are here it means the data source was addedd successfully
+                        // if we are here it means the data source was added successfully
                         success = true;
                         newDataSources.addAll(internalDataSource.getContent());
+                        
+                        // Update the names for all new data sources to be the root archive plus the name of the data source
+                        for (Content c : internalDataSource.getContent()) {
+                            if (c instanceof DataSource) {
+                                DataSource ds = (DataSource) c;
+                                String newName = Paths.get(archivePath).getFileName() + "/" + ds.getName();
+                                ds.setDisplayName(newName);
+                            }
+                        }
 
                         // skip all other DSPs for this data source
                         break;
diff --git a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java
index cfdf6cf9736582da7bbd5f0b677b84694cf16b45..7f80201f26d9d93d622486af516bd713d816621f 100644
--- a/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java
+++ b/Experimental/src/org/sleuthkit/autopsy/experimental/autoingest/AutoIngestMonitor.java
@@ -45,6 +45,7 @@
 import org.sleuthkit.autopsy.coordinationservice.CoordinationService.CoordinationServiceException;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.coreutils.NetworkUtils;
+import org.sleuthkit.autopsy.coreutils.StopWatch;
 import org.sleuthkit.autopsy.events.AutopsyEventException;
 import org.sleuthkit.autopsy.events.AutopsyEventPublisher;
 import org.sleuthkit.autopsy.experimental.autoingest.AutoIngestJob.ProcessingStatus;
@@ -666,43 +667,73 @@ void reprocessJob(AutoIngestJob job) throws AutoIngestMonitorException {
      * @return A result code indicating success, partial success, or failure.
      */
     CaseDeletionResult deleteCase(AutoIngestJob job) {
+        String caseName = job.getManifest().getCaseName();
+        Path caseDirectoryPath = job.getCaseDirectoryPath();
+        Path metadataFilePath = caseDirectoryPath.resolve(caseName + CaseMetadata.getFileExtension());
+        StopWatch stopWatch = new StopWatch();
+        stopWatch.start();
         synchronized (jobsLock) {
-            String caseName = job.getManifest().getCaseName();
-            Path metadataFilePath = job.getCaseDirectoryPath().resolve(caseName + CaseMetadata.getFileExtension());
-
+            stopWatch.stop();
+            LOGGER.log(Level.INFO, String.format("Used %d s to acquire jobsLock (Java monitor in AutoIngestMonitor class) for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
+            stopWatch.reset();
+            stopWatch.start();
             try {
                 CaseMetadata metadata = new CaseMetadata(metadataFilePath);
+                stopWatch.stop();
+                LOGGER.log(Level.INFO, String.format("Used %d s to read case metadata for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
+                stopWatch.reset();
+                stopWatch.start();
                 Case.deleteCase(metadata);
-
             } catch (CaseMetadata.CaseMetadataException ex) {
-                LOGGER.log(Level.SEVERE, String.format("Failed to get case metadata file %s for case %s at %s", metadataFilePath.toString(), caseName, job.getCaseDirectoryPath().toString()), ex);
+                LOGGER.log(Level.SEVERE, String.format("Failed to read case metadata file %s for case %s at %s", metadataFilePath, caseName, caseDirectoryPath), ex);
+                stopWatch.stop();
+                LOGGER.log(Level.INFO, String.format("Used %d s to fail to read case metadata file %s for case %s at %s", stopWatch.getElapsedTimeSecs(), metadataFilePath, caseName, caseDirectoryPath));
                 return CaseDeletionResult.FAILED;
             } catch (CaseActionException ex) {
-                LOGGER.log(Level.SEVERE, String.format("Failed to physically delete case %s at %s", caseName, job.getCaseDirectoryPath().toString()), ex);
+                LOGGER.log(Level.SEVERE, String.format("Failed to delete case %s at %s", caseName, caseDirectoryPath), ex);
                 return CaseDeletionResult.FAILED;
             }
 
             // Update the state of completed jobs associated with this case to indicate
             // that the case has been deleted
-            for (AutoIngestJob completedJob : getCompletedJobs()) {
+            stopWatch.reset();
+            stopWatch.start();
+            List<AutoIngestJob> completedJobs = getCompletedJobs();
+            stopWatch.stop();
+            LOGGER.log(Level.INFO, String.format("Used %d s to get completed jobs listing for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
+            stopWatch.reset();
+            stopWatch.start();
+            for (AutoIngestJob completedJob : completedJobs) {
                 if (caseName.equals(completedJob.getManifest().getCaseName())) {
                     try {
                         completedJob.setProcessingStatus(DELETED);
                         AutoIngestJobNodeData nodeData = new AutoIngestJobNodeData(completedJob);
                         coordinationService.setNodeData(CoordinationService.CategoryNode.MANIFESTS, completedJob.getManifest().getFilePath().toString(), nodeData.toArray());
                     } catch (CoordinationServiceException | InterruptedException ex) {
-                        LOGGER.log(Level.SEVERE, String.format("Failed to update completed job node data for %s when deleting case %s", completedJob.getManifest().getFilePath().toString(), caseName), ex);
+                        LOGGER.log(Level.SEVERE, String.format("Failed to update completed job node data for %s when deleting case %s at %s", completedJob.getManifest().getFilePath(), caseName, caseDirectoryPath), ex);
+                        stopWatch.stop();
+                        LOGGER.log(Level.INFO, String.format("Used %d s to fail to update job node data for completed jobs for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
                         return CaseDeletionResult.PARTIALLY_DELETED;
                     }
                 }
             }
+            stopWatch.stop();
+            LOGGER.log(Level.INFO, String.format("Used %d s to update job node data for completed jobs for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
 
             // Remove jobs associated with this case from the completed jobs collection.
-            jobsSnapshot.completedJobs.removeIf((AutoIngestJob completedJob)
+            stopWatch.reset();
+            stopWatch.start();
+            completedJobs.removeIf((AutoIngestJob completedJob)
                     -> completedJob.getManifest().getCaseName().equals(caseName));
+            stopWatch.stop();
+            LOGGER.log(Level.INFO, String.format("Used %d s to remove completed jobs for case %s at %s from current jobs snapshot", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
 
             // Publish a message to update auto ingest nodes.
+            stopWatch.reset();
+            stopWatch.start();
             eventPublisher.publishRemotely(new AutoIngestCaseDeletedEvent(caseName, LOCAL_HOST_NAME, AutoIngestManager.getSystemUserNameProperty()));
+            stopWatch.stop();
+            LOGGER.log(Level.INFO, String.format("Used %d s to publish job deletion event for case %s at %s", stopWatch.getElapsedTimeSecs(), caseName, caseDirectoryPath));
         }
 
         return CaseDeletionResult.FULLY_DELETED;
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java
index a0300e72c5e6c517712e82a63cd3ac19a233190b..bc78be5fd7f9c34bfde0a8634e7e323d91952bdf 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryController.java
@@ -233,19 +233,19 @@ && isDataSourcesTableStale()
         dbTaskQueueSize.addListener(obs -> this.updateRegroupDisabled());
 
     }
-    
+
     /**
      * @return Currently displayed group or null if nothing is being displayed
      */
     public GroupViewState getViewState() {
         return historyManager.getCurrentState();
     }
-    
+
     /**
      * Get observable property of the current group. The UI currently changes
-     * based on this property changing, which happens when other actions and 
+     * based on this property changing, which happens when other actions and
      * threads call advance().
-     * 
+     *
      * @return Currently displayed group (as a property that can be observed)
      */
     public ReadOnlyObjectProperty<GroupViewState> viewStateProperty() {
@@ -254,7 +254,8 @@ public ReadOnlyObjectProperty<GroupViewState> viewStateProperty() {
 
     /**
      * Should the "forward" button on the history be enabled?
-     * @return 
+     *
+     * @return
      */
     public ReadOnlyBooleanProperty getCanAdvance() {
         return historyManager.getCanAdvance();
@@ -262,19 +263,19 @@ public ReadOnlyBooleanProperty getCanAdvance() {
 
     /**
      * Should the "Back" button on the history be enabled?
-     * @return 
+     *
+     * @return
      */
     public ReadOnlyBooleanProperty getCanRetreat() {
         return historyManager.getCanRetreat();
     }
 
     /**
-     * Display the passed in group.  Causes this group to 
-     * get recorded in the history queue and observers of the 
-     * current state will be notified and update their panels/widgets
-     * appropriately.
-     * 
-     * @param newState 
+     * Display the passed in group. Causes this group to get recorded in the
+     * history queue and observers of the current state will be notified and
+     * update their panels/widgets appropriately.
+     *
+     * @param newState
      */
     @ThreadConfined(type = ThreadConfined.ThreadType.ANY)
     public void advance(GroupViewState newState) {
@@ -283,7 +284,8 @@ public void advance(GroupViewState newState) {
 
     /**
      * Display the next group in the "forward" history stack
-     * @return 
+     *
+     * @return
      */
     public GroupViewState advance() {
         return historyManager.advance();
@@ -291,7 +293,8 @@ public GroupViewState advance() {
 
     /**
      * Display the previous group in the "back" history stack
-     * @return 
+     *
+     * @return
      */
     public GroupViewState retreat() {
         return historyManager.retreat();
@@ -500,10 +503,6 @@ public DrawableFile getFileFromID(Long fileID) throws TskCoreException {
         return drawableDB.getFileFromID(fileID);
     }
 
-    public ReadOnlyDoubleProperty regroupProgress() {
-        return groupManager.regroupProgress();
-    }
-
     public HashSetManager getHashSetManager() {
         return hashSetManager;
     }
@@ -763,16 +762,17 @@ public void run() {
                 // Cycle through all of the files returned and call processFile on each
                 //do in transaction
                 drawableDbTransaction = taskDB.beginTransaction();
-                
-                /* We are going to periodically commit the CaseDB transaction and sleep so
-                 * that the user can have Autopsy do other stuff while these bulk tasks are ongoing.
+
+                /* We are going to periodically commit the CaseDB transaction
+                 * and sleep so that the user can have Autopsy do other stuff
+                 * while these bulk tasks are ongoing.
                  */
                 int caseDbCounter = 0;
                 for (final AbstractFile f : files) {
                     if (caseDbTransaction == null) {
                         caseDbTransaction = tskCase.beginTransaction();
                     }
-                    
+
                     if (isCancelled() || Thread.interrupted()) {
                         logger.log(Level.WARNING, "Task cancelled or interrupted: not all contents may be transfered to drawable database."); //NON-NLS
                         taskCompletionStatus = false;
@@ -787,7 +787,7 @@ public void run() {
                     progressHandle.progress(f.getName(), workDone);
                     updateProgress(workDone - 1 / (double) files.size());
                     updateMessage(f.getName());
-                    
+
                     // Periodically, commit the transaction (which frees the lock) and sleep
                     // to allow other threads to get some work done in CaseDB
                     if ((++caseDbCounter % 200) == 0) {
@@ -807,12 +807,12 @@ public void run() {
                     caseDbTransaction.commit();
                     caseDbTransaction = null;
                 }
-                
+
                 // pass true so that groupmanager is notified of the changes
                 taskDB.commitTransaction(drawableDbTransaction, true);
                 drawableDbTransaction = null;
 
-            } catch (TskCoreException  | InterruptedException ex) {
+            } catch (TskCoreException | InterruptedException ex) {
                 progressHandle.progress(Bundle.BulkTask_stopCopy_status());
                 logger.log(Level.WARNING, "Stopping copy to drawable db task.  Failed to transfer all database contents", ex); //NON-NLS
                 MessageNotifyUtil.Notify.warn(Bundle.BulkTask_errPopulating_errMsg(), ex.getMessage());
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java
index 88a7d38d70555ff54ac907f1cea0160fcd39a83b..8386999c69e917e477d411476fa35c67df82dc00 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/ImageGalleryModule.java
@@ -157,14 +157,14 @@ public void propertyChange(PropertyChangeEvent evt) {
                 IngestManager.getInstance().removeIngestModuleEventListener(this);
                 return;
             }
-            
+
             /* only process individual files in realtime on the node that is
              * running the ingest. on a remote node, image files are processed
              * enblock when ingest is complete */
             if (((AutopsyEvent) evt).getSourceType() != AutopsyEvent.SourceType.LOCAL) {
                 return;
             }
-            
+
             // Bail out if the case is closed
             try {
                 if (controller == null || Case.getCurrentCaseThrows() == null) {
@@ -211,8 +211,8 @@ else if (FileTypeUtils.getAllSupportedExtensions().contains(file.getNameExtensio
                 }
             }
             else if (IngestManager.IngestModuleEvent.valueOf(evt.getPropertyName()) == DATA_ADDED) {
-                ModuleDataEvent mde = (ModuleDataEvent)evt.getOldValue();
-                
+                ModuleDataEvent mde = (ModuleDataEvent) evt.getOldValue();
+
                 if (mde.getBlackboardArtifactType().getTypeID() == ARTIFACT_TYPE.TSK_METADATA_EXIF.getTypeID()) {
                     DrawableDB drawableDB = controller.getDatabase();
                     if (mde.getArtifacts() != null) {
@@ -291,13 +291,13 @@ public void propertyChange(PropertyChangeEvent evt) {
                     break;
                 case CONTENT_TAG_ADDED:
                     final ContentTagAddedEvent tagAddedEvent = (ContentTagAddedEvent) evt;
-                    
+
                     long objId = tagAddedEvent.getAddedTag().getContent().getId();
-                    
+
                     // update the cache
                     DrawableDB drawableDB = controller.getDatabase();
                     drawableDB.addTagCache(objId);
-                    
+
                     if (con.getDatabase().isInDB(objId)) {
                         con.getTagsManager().fireTagAddedEvent(tagAddedEvent);
                     }
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/DrawableGroup.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/DrawableGroup.java
index bdb54c448fa8c77482eabfc471b8c4802c0a1035..5469afe4d3f90330d412d3d83ee6f360ab215ca6 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/DrawableGroup.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/DrawableGroup.java
@@ -68,13 +68,6 @@ public static String getBlankGroupName() {
     DrawableGroup(GroupKey<?> groupKey, Set<Long> filesInGroup, boolean seen) {
         this.groupKey = groupKey;
         this.fileIDs.setAll(filesInGroup);
-        fileIDs.addListener((ListChangeListener.Change<? extends Long> listchange) -> {
-            boolean seenChanged = false;
-            while (false == seenChanged && listchange.next()) {
-                seenChanged |= listchange.wasAdded();
-            }
-            invalidateProperties(seenChanged);
-        });
         this.seen.set(seen);
     }
 
@@ -183,15 +176,21 @@ synchronized void addFile(Long f) {
         if (fileIDs.contains(f) == false) {
             fileIDs.add(f);
         }
+        // invalidate no matter what because the file could have new hash hits, etc.
+        invalidateProperties(true);
     }
 
     synchronized void setFiles(Set<? extends Long> newFileIds) {
         fileIDs.removeIf(fileID -> newFileIds.contains(fileID) == false);
+        invalidateProperties(false);
         newFileIds.stream().forEach(this::addFile);
     }
 
     synchronized void removeFile(Long f) {
-        fileIDs.removeAll(f);
+        if (fileIDs.contains(f)) {
+            fileIDs.removeAll(f);
+            invalidateProperties(false);
+        }
     }
 
     private void invalidateProperties(boolean seenChanged) {
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java
index f25142b406e4b3867933a43bbc8bd57127aed64e..bfd0fab496ea32a2fc1709ac117e30aaf1ec7a0c 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/datamodel/grouping/GroupManager.java
@@ -50,6 +50,7 @@
 import javafx.beans.property.ReadOnlyDoubleProperty;
 import javafx.beans.property.ReadOnlyObjectProperty;
 import javafx.beans.property.ReadOnlyObjectWrapper;
+import javafx.beans.property.ReadOnlyStringProperty;
 import javafx.collections.FXCollections;
 import javafx.collections.ObservableList;
 import javafx.concurrent.Service;
@@ -278,10 +279,10 @@ public ListenableFuture<?> markGroupSeen(DrawableGroup group, boolean seen) {
     }
 
     /**
-     * Update unseenGroups list accordingly based on the current status
-     * of 'group'. Removes it if it is seen or adds it if it is unseen.
-     * 
-     * @param group 
+     * Update the unseenGroups list based on the current status of 'group':
+     * remove it if it is seen, or add it if it is unseen.
+     *
+     * @param group
      */
     synchronized private void updateUnSeenGroups(DrawableGroup group) {
         if (group.isSeen()) {
@@ -505,6 +506,10 @@ public ReadOnlyDoubleProperty regroupProgress() {
         return regrouper.progressProperty();
     }
 
+    public ReadOnlyStringProperty regroupMessage() {
+        return regrouper.messageProperty();
+    }
+
     @Subscribe
     synchronized public void handleTagAdded(ContentTagAddedEvent evt) {
         GroupKey<?> newGroupKey = null;
@@ -619,7 +624,7 @@ synchronized public void handleFileUpdate(Collection<Long> updatedFileIDs) {
      * If the group is analyzed (or other criteria based on grouping) and should
      * be shown to the user, then add it to the appropriate data structures so
      * that it can be viewed.
-     * 
+     *
      * @returns null if Group is not ready to be viewed
      */
     synchronized private DrawableGroup popuplateIfAnalyzed(GroupKey<?> groupKey, ReGroupTask<?> task) {
@@ -730,12 +735,7 @@ synchronized public void setCollaborativeMode(Boolean newValue) {
      */
     @SuppressWarnings({"unchecked", "rawtypes"})
     @NbBundle.Messages({"# {0} - groupBy attribute Name",
-        "# {1} - sortBy name",
-        "# {2} - sort Order",
-        "ReGroupTask.displayTitle=regrouping files by {0} sorted by {1} in {2} order",
-        "# {0} - groupBy attribute Name",
-        "# {1} - atribute value",
-        "ReGroupTask.progressUpdate=regrouping files by {0} : {1}"})
+        "ReGroupTask.displayTitle=regrouping by {0}: " })
     class ReGroupTask<AttrValType extends Comparable<AttrValType>> extends LoggedTask<Void> {
 
         private final DataSource dataSource;
@@ -743,16 +743,14 @@ class ReGroupTask<AttrValType extends Comparable<AttrValType>> extends LoggedTas
         private final GroupSortBy sortBy;
         private final SortOrder sortOrder;
 
-        private final ProgressHandle groupProgress;
-
         ReGroupTask(DataSource dataSource, DrawableAttribute<AttrValType> groupBy, GroupSortBy sortBy, SortOrder sortOrder) {
-            super(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString(), sortBy.getDisplayName(), sortOrder.toString()), true);
+            super(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString()), true);
             this.dataSource = dataSource;
             this.groupBy = groupBy;
             this.sortBy = sortBy;
             this.sortOrder = sortOrder;
 
-            groupProgress = ProgressHandle.createHandle(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString(), sortBy.getDisplayName(), sortOrder.toString()), this);
+            updateTitle(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString()));
         }
 
         @Override
@@ -761,7 +759,8 @@ protected Void call() throws Exception {
                 if (isCancelled()) {
                     return null;
                 }
-                groupProgress.start();
+
+                updateProgress(-1, 1);
 
                 analyzedGroups.clear();
                 unSeenGroups.clear();
@@ -769,7 +768,7 @@ protected Void call() throws Exception {
                 // Get the list of group keys
                 Multimap<DataSource, AttrValType> valsByDataSource = findValuesForAttribute();
 
-                groupProgress.switchToDeterminate(valsByDataSource.entries().size());
+                updateProgress(0, valsByDataSource.entries().size());
                 int p = 0;
                 // For each key value, partially create the group and add it to the list.
                 for (final Map.Entry<DataSource, AttrValType> valForDataSource : valsByDataSource.entries()) {
@@ -777,9 +776,8 @@ protected Void call() throws Exception {
                         return null;
                     }
                     p++;
-                    updateMessage(Bundle.ReGroupTask_progressUpdate(groupBy.attrName.toString(), valForDataSource.getValue()));
+                    updateMessage(Bundle.ReGroupTask_displayTitle(groupBy.attrName.toString()) + valForDataSource.getValue());
                     updateProgress(p, valsByDataSource.size());
-                    groupProgress.progress(Bundle.ReGroupTask_progressUpdate(groupBy.attrName.toString(), valForDataSource), p);
                     popuplateIfAnalyzed(new GroupKey<>(groupBy, valForDataSource.getValue(), valForDataSource.getKey()), this);
                 }
 
@@ -808,8 +806,8 @@ protected Void call() throws Exception {
                     }
                 }
             } finally {
-                groupProgress.finish();
                 updateProgress(1, 1);
+                updateMessage("");
             }
             return null;
         }
@@ -827,12 +825,9 @@ protected void done() {
         }
 
         /**
-         * find the distinct values for the given column (DrawableAttribute)
-         *
+         * Find the distinct values for the given column (DrawableAttribute).
          * These values represent the groups of files.
          *
-         * @param groupBy
-         *
          * @return map of data source (or null if group by attribute ignores
          *         data sources) to list of unique group values
          */
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/StatusBar.fxml b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/StatusBar.fxml
index b02e2e249634b56770113652d3855930554d80b2..c0ee4488b1224a05bb03389113617a246e25068b 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/StatusBar.fxml
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/StatusBar.fxml
@@ -10,12 +10,21 @@
 <?import javafx.scene.layout.HBox?>
 <?import javafx.scene.layout.StackPane?>
 
-<fx:root id="AnchorPane" maxHeight="-Infinity" maxWidth="1.7976931348623157E308" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" type="javafx.scene.layout.AnchorPane" xmlns="http://javafx.com/javafx/8.0.65" xmlns:fx="http://javafx.com/fxml/1">
+<fx:root id="AnchorPane" maxHeight="-Infinity" maxWidth="1.7976931348623157E308" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" type="javafx.scene.layout.AnchorPane" xmlns="http://javafx.com/javafx/8.0.141" xmlns:fx="http://javafx.com/fxml/1">
   <children>
     <BorderPane minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" AnchorPane.bottomAnchor="0.0" AnchorPane.leftAnchor="0.0" AnchorPane.rightAnchor="0.0" AnchorPane.topAnchor="0.0">
       <right>
         <HBox alignment="CENTER_RIGHT" prefHeight="-1.0" prefWidth="-1.0" spacing="5.0" BorderPane.alignment="CENTER_RIGHT">
           <children>
+            <Label fx:id="staleLabel" text="Some data may be out of date.  Enable listening to ingest to update.">
+              <graphic>
+                <ImageView fitHeight="16.0" fitWidth="16.0" pickOnBounds="true" preserveRatio="true">
+                  <image>
+                    <Image url="@../images/information.png" />
+                  </image>
+                </ImageView>
+              </graphic>
+            </Label>
             <StackPane maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" HBox.hgrow="NEVER">
               <children>
                 <ProgressBar id="progBar" fx:id="fileTaskProgresBar" focusTraversable="false" maxHeight="-1.0" maxWidth="1.7976931348623157E308" minHeight="-Infinity" minWidth="-1.0" prefHeight="24.0" prefWidth="-1.0" progress="0.0" visible="true" />
@@ -31,37 +40,27 @@
                         <Insets />
                      </HBox.margin>
             </StackPane>
-            <StackPane maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" HBox.hgrow="NEVER">
-              <children>
-                <ProgressBar fx:id="bgTaskProgressBar" maxHeight="-1.0" maxWidth="-1.0" minHeight="-Infinity" minWidth="-1.0" prefHeight="24.0" prefWidth="-1.0" progress="0.0" StackPane.alignment="CENTER" />
-                <Label fx:id="bgTaskLabel" alignment="CENTER" cache="false" contentDisplay="CENTER" disable="false" focusTraversable="false" maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" text="" StackPane.alignment="CENTER">
-                           <StackPane.margin>
-                              <Insets left="3.0" right="3.0" />
-                           </StackPane.margin>
-                           <padding>
-                              <Insets bottom="3.0" left="3.0" right="3.0" top="3.0" />
-                           </padding></Label>
-              </children>
-<HBox.margin>
-<Insets right="5.0" />
-</HBox.margin>
-            </StackPane>
           </children>
 <BorderPane.margin>
 <Insets left="10.0" />
 </BorderPane.margin>
         </HBox>
       </right>
-<left><Label fx:id="staleLabel" text="Some data may be out of date.  Enable listening to ingest to update." BorderPane.alignment="CENTER">
-<graphic><ImageView fitHeight="16.0" fitWidth="16.0" pickOnBounds="true" preserveRatio="true">
-<image>
-<Image url="@../images/information.png" />
-</image></ImageView>
-</graphic>
-<BorderPane.margin>
-<Insets bottom="5.0" left="5.0" right="10.0" top="5.0" />
-</BorderPane.margin></Label>
-</left>
+      <left>
+        <StackPane maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefHeight="-1.0" prefWidth="-1.0" BorderPane.alignment="CENTER">
+          <children>
+            <ProgressBar fx:id="regroupProgressBar" maxHeight="-1.0" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-1.0" prefHeight="24.0" prefWidth="500.0" progress="0.0" StackPane.alignment="CENTER_LEFT" />
+            <Label fx:id="regroupLabel" cache="false" contentDisplay="CENTER" disable="false" focusTraversable="false" maxHeight="-Infinity" maxWidth="-Infinity" minHeight="-Infinity" minWidth="-Infinity" prefWidth="500.0" text="" textOverrun="CENTER_ELLIPSIS" StackPane.alignment="CENTER_LEFT">
+              <StackPane.margin>
+                <Insets left="3.0" right="3.0" />
+              </StackPane.margin>
+              <padding>
+                <Insets bottom="3.0" left="3.0" right="3.0" top="3.0" />
+              </padding>
+            </Label>
+          </children>
+        </StackPane>
+      </left>
     </BorderPane>
   </children>
 </fx:root>
diff --git a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/StatusBar.java b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/StatusBar.java
index 2355d6d2a0710206b752aab222faf05c2556f6c0..d7224c4a581dd17374b10d71b4c2637ea2125d2b 100644
--- a/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/StatusBar.java
+++ b/ImageGallery/src/org/sleuthkit/autopsy/imagegallery/gui/StatusBar.java
@@ -28,28 +28,26 @@
 import javafx.scene.layout.AnchorPane;
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.imagegallery.ImageGalleryController;
+import org.sleuthkit.autopsy.imagegallery.datamodel.grouping.GroupManager;
 
 /**
  *
  */
 public class StatusBar extends AnchorPane {
 
-    private final ImageGalleryController controller;
-
     @FXML
     private ProgressBar fileTaskProgresBar;
-
     @FXML
     private Label fileUpdateTaskLabel;
-
     @FXML
-    private Label bgTaskLabel;
-
+    private Label regroupLabel;
     @FXML
     private Label staleLabel;
-
     @FXML
-    private ProgressBar bgTaskProgressBar;
+    private ProgressBar regroupProgressBar;
+
+    private final ImageGalleryController controller;
+    private final GroupManager groupManager;
 
     @FXML
     @NbBundle.Messages({"StatusBar.fileUpdateTaskLabel.text= File Update Tasks",
@@ -58,23 +56,25 @@ public class StatusBar extends AnchorPane {
     void initialize() {
         assert fileTaskProgresBar != null : "fx:id=\"fileTaskProgresBar\" was not injected: check your FXML file 'StatusBar.fxml'.";
         assert fileUpdateTaskLabel != null : "fx:id=\"fileUpdateTaskLabel\" was not injected: check your FXML file 'StatusBar.fxml'.";
-        assert bgTaskLabel != null : "fx:id=\"bgTaskLabel\" was not injected: check your FXML file 'StatusBar.fxml'.";
-        assert bgTaskProgressBar != null : "fx:id=\"bgTaskProgressBar\" was not injected: check your FXML file 'StatusBar.fxml'.";
+        assert regroupLabel != null : "fx:id=\"regroupLabel\" was not injected: check your FXML file 'StatusBar.fxml'.";
+        assert regroupProgressBar != null : "fx:id=\"regroupProgressBar\" was not injected: check your FXML file 'StatusBar.fxml'.";
 
         fileUpdateTaskLabel.textProperty().bind(controller.getDBTasksQueueSizeProperty().asString().concat(Bundle.StatusBar_fileUpdateTaskLabel_text()));
         fileTaskProgresBar.progressProperty().bind(controller.getDBTasksQueueSizeProperty().negate());
 
-        controller.regroupProgress().addListener((ov, oldSize, newSize) -> {
+        groupManager.regroupProgress().addListener((ov, oldSize, newSize) -> {
             Platform.runLater(() -> {
-                if (controller.regroupProgress().lessThan(1.0).get()) {
+                if (groupManager.regroupProgress().lessThan(1.0).get()) {
                     // Regrouping in progress
-                    bgTaskProgressBar.progressProperty().setValue(-1.0);
-                    bgTaskLabel.setText(Bundle.StatusBar_bgTaskLabel_text());
+                    regroupProgressBar.progressProperty().setValue(groupManager.regroupProgress().doubleValue());
+                    regroupLabel.setText(groupManager.regroupMessage().get());
+
                 } else {
                     // Clear the progress bar
-                    bgTaskProgressBar.progressProperty().setValue(0.0);
-                    bgTaskLabel.setText("");
+                    regroupProgressBar.progressProperty().setValue(0.0);
+                    regroupLabel.setText("");
                 }
+                regroupLabel.setTooltip(new Tooltip(regroupLabel.getText()));
             });
         });
 
@@ -84,6 +84,7 @@ void initialize() {
 
     public StatusBar(ImageGalleryController controller) {
         this.controller = controller;
+        this.groupManager = controller.getGroupManager();
         FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("StatusBar.fxml")); //NON-NLS
         fxmlLoader.setRoot(this);
         fxmlLoader.setController(this);
@@ -93,6 +94,5 @@ public StatusBar(ImageGalleryController controller) {
         } catch (IOException exception) {
             throw new RuntimeException(exception);
         }
-
     }
 }
diff --git a/NEWS.txt b/NEWS.txt
index f93db10f9c48f27314cb88b3c595c563881f80ae..6130ca50dafb6ac67859b53824cebdb78138c2ca 100644
--- a/NEWS.txt
+++ b/NEWS.txt
@@ -1,8 +1,8 @@
----------------- VERSION 4.9.0  --------------
+---------------- VERSION 4.9.1  --------------
 Bug Fixes:
-- Fixed possible ingest deadlock from Image Gallery database inserts
-- Image Gallery does not need lock on Case DB during pre-population, which makes UI more responsive
-- Other misc Image Gallery fixes
+- Fixed possible ingest deadlock from Image Gallery database inserts.
+- Image Gallery does not need lock on Case DB during pre-population, which makes UI more responsive.
+- Other misc Image Gallery fixes.
 
 ---------------- VERSION 4.9.0  --------------
 
diff --git a/RecentActivity/nbproject/project.xml b/RecentActivity/nbproject/project.xml
index 7b7ae183476b262231ecd0e83f83c130f1233834..020377beec80b4bd4599e3a9288c71abd1216f1a 100644
--- a/RecentActivity/nbproject/project.xml
+++ b/RecentActivity/nbproject/project.xml
@@ -64,9 +64,7 @@
                     </run-dependency>
                 </dependency>
             </module-dependencies>
-            <public-packages>
-                <package>org.sleuthkit.autopsy.recentactivity</package>
-            </public-packages>
+            <public-packages/>
             <class-path-extension>
                 <runtime-relative-path>ext/gson-2.1.jar</runtime-relative-path>
                 <binary-origin>release/modules/ext/gson-2.1.jar</binary-origin>
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java
index 0f9a98cd88c12037dfed55b4e826edbe97c267ad..69381d8a139695dd176ea0f0a28c3fc726e2e657 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Chrome.java
@@ -39,6 +39,7 @@
 import java.io.IOException;
 import org.sleuthkit.autopsy.casemodule.services.FileManager;
 import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.NetworkUtils;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.datamodel.AbstractFile;
@@ -163,7 +164,7 @@ private void getHistory() {
                         NbBundle.getMessage(this.getClass(), "Chrome.moduleName")));
                 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN,
                         NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
-                        (Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
+                        (NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
 
                 BlackboardArtifact bbart = this.addArtifact(ARTIFACT_TYPE.TSK_WEB_HISTORY, historyFile, bbattributes);
                 if (bbart != null) {
@@ -286,7 +287,7 @@ private void getBookmark() {
                 } else {
                     date = Long.valueOf(0);
                 }
-                String domain = Util.extractDomain(url);
+                String domain = NetworkUtils.extractDomain(url);
                 try {
                     BlackboardArtifact bbart = bookmarkFile.newArtifact(ARTIFACT_TYPE.TSK_WEB_BOOKMARK);
                     Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
@@ -496,7 +497,7 @@ private void getDownload() {
                 //bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_LAST_ACCESSED.getTypeID(), "Recent Activity", "Last Visited", time));
                 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_ACCESSED,
                         NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), time));
-                String domain = Util.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : ""); //NON-NLS
+                String domain = NetworkUtils.extractDomain((result.get("url").toString() != null) ? result.get("url").toString() : ""); //NON-NLS
                 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN,
                         NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"), domain));
                 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PROG_NAME,
@@ -590,7 +591,7 @@ private void getLogin() {
                         NbBundle.getMessage(this.getClass(), "Chrome.moduleName")));
                 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_URL_DECODED,
                         NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
-                        (Util.extractDomain((result.get("origin_url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
+                        (NetworkUtils.extractDomain((result.get("origin_url").toString() != null) ? result.get("url").toString() : "")))); //NON-NLS
                 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_USER_NAME,
                         NbBundle.getMessage(this.getClass(), "Chrome.parentModuleName"),
                         ((result.get("username_value").toString() != null) ? result.get("username_value").toString().replaceAll("'", "''") : ""))); //NON-NLS
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java
index 90a0a4d7fa62aeebec90b662dc340cd4423bb3c9..29844b08272dce0d698f2a926023b07d50e5da59 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractIE.java
@@ -26,6 +26,7 @@
 
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.coreutils.ExecUtil;
+import org.sleuthkit.autopsy.coreutils.NetworkUtils;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
@@ -609,6 +610,6 @@ private String extractDomain(String url) {
             return null;
         }
         
-        return Util.extractDomain(url);
+        return NetworkUtils.extractDomain(url);
     }
 }
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
index 70607f361cd9cb266904ed2fd882704224844397..6612feb60cdc8eb432579e057a3e7b03f30126ca 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/ExtractRegistry.java
@@ -374,7 +374,9 @@ private boolean parseAutopsyPluginOutput(String regFilePath, AbstractFile regFil
             // Add all "usb" dataType nodes to collection of BlackboardArtifacts 
             // that we will submit in a ModuleDataEvent for additional processing.
             Collection<BlackboardArtifact> usbBBartifacts = new ArrayList<>();
-
+            // Add all "ssid" dataType nodes to collection of BlackboardArtifacts 
+            // that we will submit in a ModuleDataEvent for additional processing.
+            Collection<BlackboardArtifact> wifiBBartifacts = new ArrayList<>();
             for (int i = 0; i < len; i++) {
                 Element tempnode = (Element) children.item(i);
 
@@ -734,6 +736,7 @@ private boolean parseAutopsyPluginOutput(String regFilePath, AbstractFile regFil
                                             bbart.addAttributes(bbattributes);
                                             // index the artifact for keyword search
                                             this.indexArtifact(bbart);
+                                            wifiBBartifacts.add(bbart);
                                         } catch (TskCoreException ex) {
                                             logger.log(Level.SEVERE, "Error adding SSID artifact to blackboard."); //NON-NLS
                                         }
@@ -756,6 +759,9 @@ private boolean parseAutopsyPluginOutput(String regFilePath, AbstractFile regFil
             if (!usbBBartifacts.isEmpty()) {
                 IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_DEVICE_ATTACHED, usbBBartifacts));
             }
+            if (!wifiBBartifacts.isEmpty()) {
+                IngestServices.getInstance().fireModuleDataEvent(new ModuleDataEvent(moduleName, BlackboardArtifact.ARTIFACT_TYPE.TSK_WIFI_NETWORK, wifiBBartifacts));
+            }
             return true;
         } catch (FileNotFoundException ex) {
             logger.log(Level.SEVERE, "Error finding the registry file."); //NON-NLS
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java
index d1874d40108ff187955c48a4d47c0fdc94e534c5..6cb51795f306575bf60b0b8c207285d171740677 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Firefox.java
@@ -35,6 +35,7 @@
 import org.openide.util.NbBundle;
 import org.sleuthkit.autopsy.casemodule.services.FileManager;
 import org.sleuthkit.autopsy.coreutils.Logger;
+import org.sleuthkit.autopsy.coreutils.NetworkUtils;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.autopsy.ingest.IngestJobContext;
 import org.sleuthkit.autopsy.ingest.IngestServices;
@@ -669,6 +670,6 @@ private String extractDomain(String url) {
             return null;
         }
         
-        return Util.extractDomain(url);
+        return NetworkUtils.extractDomain(url);
     }
 }
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Util.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Util.java
index 9bff0673940404f79243256a3ff7ddb813edb81e..fd9630cebd5d52cee4b84b04de2ec097f22cd79e 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Util.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/Util.java
@@ -84,83 +84,6 @@ public static String readFile(String path) throws IOException {
         }
     }
 
-    /**
-     * 
-     * @param url
-     * @return empty string if no domain could be found
-     */
-    private static String getBaseDomain(String url) {
-        String host = null;
-        
-        //strip protocol
-        String cleanUrl = url.replaceFirst(".*:\\/\\/", "");
-
-        //strip after slashes
-        String dirToks[] = cleanUrl.split("\\/");
-        if (dirToks.length > 0) {
-            host = dirToks[0];
-        } else {
-            host = cleanUrl;
-        }
-
-        //get the domain part from host (last 2)
-        StringTokenizer tok = new StringTokenizer(host, ".");
-        StringBuilder hostB = new StringBuilder();
-        int toks = tok.countTokens();
-
-        for (int count = 0; count < toks; ++count) {
-            String part = tok.nextToken();
-            int diff = toks - count;
-            if (diff < 3) {
-                hostB.append(part);
-            }
-            if (diff == 2) {
-                hostB.append(".");
-            }
-        }
-        
-        
-        String base = hostB.toString();
-        // verify there are no special characters in there
-        if (base.matches(".*[~`!@#$%^&\\*\\(\\)\\+={}\\[\\];:\\?<>,/ ].*")) {
-            return "";
-        }
-        return base;
-    }
-
-    /**
-     * 
-     * @param value
-     * @return empty string if no domain name was found
-     */
-    public static String extractDomain(String value) {
-        if (value == null) {
-            return "";
-
-        }
-        String result = "";
-        // String domainPattern = "(\\w+)\\.(AC|AD|AE|AERO|AF|AG|AI|AL|AM|AN|AO|AQ|AR|ARPA|AS|ASIA|AT|AU|AW|AX|AZ|BA|BB|BD|BE|BF|BG|BH|BI|BIZ|BJ|BM|BN|BO|BR|BS|BT|BV|BW|BY|BZ|CA|CAT|CC|CD|CF|CG|CH|CI|CK|CL|CM|CN|CO|COM|COOP|CR|CU|CV|CW|CX|CY|CZ|DE|DJ|DK|DM|DO|DZ|EC|EDU|EE|EG|ER|ES|ET|EU|FI|FJ|FK|FM|FO|FR|GA|GB|GD|GE|GF|GG|GH|GI|GL|GM|GN|GOV|GP|GQ|GR|GS|GT|GU|GW|GY|HK|HM|HN|HR|HT|HU|ID|IE|IL|IM|IN|INFO|INT|IO|IQ|IR|IS|IT|JE|JM|JO|JOBS|JP|KE|KG|KH|KI|KM|KN|KP|KR|KW|KY|KZ|LA|LB|LC|LI|LK|LR|LS|LT|LU|LV|LY|MA|MC|MD|ME|MG|MH|MIL|MK|ML|MM|MN|MO|MOBI|MP|MQ|MR|MS|MT|MU|MUSEUM|MV|MW|MX|MY|MZ|NA|NAME|NC|NE|NET|NF|NG|NI|NL|NO|NP|NR|NU|NZ|OM|ORG|PA|PE|PF|PG|PH|PK|PL|PM|PN|PR|PRO|PS|PT|PW|PY|QA|RE|RO|RS|RU|RW|SA|SB|SC|SD|SE|SG|SH|SI|SJ|SK|SL|SM|SN|SO|SR|ST|SU|SV|SX|SY|SZ|TC|TD|TEL|TF|TG|TH|TJ|TK|TL|TM|TN|TO|TP|TR|TRAVEL|TT|TV|TW|TZ|UA|UG|UK|US|UY|UZ|VA|VC|VE|VG|VI|VN|VU|WF|WS|XXX|YE|YT|ZA|ZM|ZW(co\\.[a-z].))";
-        //  Pattern p = Pattern.compile(domainPattern,Pattern.CASE_INSENSITIVE);
-        //  Matcher m = p.matcher(value);
-        //  while (m.find()) {
-        //  result = value.substring(m.start(0),m.end(0));
-        //  }
-
-        try {
-            URL url = new URL(value);
-            result = url.getHost();
-        } catch (MalformedURLException ex) {
-            //do not log if not a valid URL, and handle later
-            //Logger.getLogger(Util.class.getName()).log(Level.SEVERE, null, ex);
-        }
-
-        //was not a valid URL, try a less picky method
-        if (result == null || result.trim().isEmpty()) {
-            return getBaseDomain(value);
-        }
-        return result;
-    }
-
     public static String getFileName(String value) {
         String filename = "";
         String filematch = "^([a-zA-Z]\\:)(\\\\[^\\\\/:*?<>\"|]*(?<!\\[ \\]))*(\\.[a-zA-Z]{2,6})$"; //NON-NLS
diff --git a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties
index 91571da9bf06d2c7255119e69dae7749312ebe8e..202947f0332cf57e29745ecbf1052c347d437c8d 100644
--- a/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties
+++ b/branding/core/core.jar/org/netbeans/core/startup/Bundle.properties
@@ -1,5 +1,5 @@
 #Updated by build script
-#Sat, 13 Oct 2018 21:02:18 -0400
+#Tue, 13 Nov 2018 17:30:09 -0500
 LBL_splash_window_title=Starting Autopsy
 SPLASH_HEIGHT=314
 SPLASH_WIDTH=538
@@ -8,4 +8,4 @@ SplashRunningTextBounds=0,289,538,18
 SplashRunningTextColor=0x0
 SplashRunningTextFontSize=19
 
-currentVersion=Autopsy 4.9.0
+currentVersion=Autopsy 4.9.1
diff --git a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
index 90fb6cf2768571767ca85c2a7077b22ed51f9ba8..11be8888476d9cffbc5b31d4e61bbc1112f499c7 100644
--- a/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
+++ b/branding/modules/org-netbeans-core-windows.jar/org/netbeans/core/windows/view/ui/Bundle.properties
@@ -1,4 +1,4 @@
 #Updated by build script
-#Sat, 13 Oct 2018 21:02:18 -0400
-CTL_MainWindow_Title=Autopsy 4.9.0
-CTL_MainWindow_Title_No_Project=Autopsy 4.9.0
+#Tue, 13 Nov 2018 17:30:09 -0500
+CTL_MainWindow_Title=Autopsy 4.9.1
+CTL_MainWindow_Title_No_Project=Autopsy 4.9.1
diff --git a/release/update_autopsy_version.pl b/release/update_autopsy_version.pl
deleted file mode 100755
index 0a115c8dafb7220c423f600dffca6af9b6952a41..0000000000000000000000000000000000000000
--- a/release/update_autopsy_version.pl
+++ /dev/null
@@ -1,268 +0,0 @@
-#!/usr/bin/perl
-
-# Updates various Autopsy version numbers 
-
-use strict;
-use File::Copy;
-
-# global variables
-my $VER;
-
-
-my $TESTING = 0;
-print "TESTING MODE (no commits)\n" if ($TESTING);
-
-
-
-sub main {
-
-	# Get the Autopsy version argument
-	if (scalar (@ARGV) != 1) {
-	    print stderr "Missing release version argument (i.e.  4.9.0)\n";
-	    exit;
-	}
-	
-	$VER = $ARGV[0];
-	die "Invalid version number: $VER (1.2.3 or 1.2.3b1 expected)" unless ($VER =~ /^\d+\.\d+\.\d+(b\d+)?$/);
-	
-	
-	my $AUT_RELNAME = "autopsy-${VER}";
-	# Verify the tag doesn't already exist
-	exec_pipe(*OUT, "git tag | grep \"${AUT_RELNAME}\$\"");
-	my $foo = read_pipe_line(*OUT);
-	if ($foo ne "") {
-		print "Tag ${AUT_RELNAME} already exists\n";
-		print "Remove with 'git tag -d ${AUT_RELNAME}'\n";
-		die "stopping";
-	}
-	close(OUT);
-	
-	# Assume we running out of 'release' folder
-	chdir ".." or die "Error changing directories to root";
-	
-	
-	# verify_precheckin();
-	
-	
-	# Update the version info in that tag
-	update_project_properties();
-	update_doxygen_dev();
-	update_doxygen_user();
-	
-	print "Files updated.  You need to commit and push them\n";
-}
-
-
-
-
-
-######################################################
-# Utility functions
-
-
-# Function to execute a command and send output to pipe
-# returns handle
-# exec_pipe(HANDLE, CMD);
-sub exec_pipe {
-    my $handle = shift(@_);
-    my $cmd    = shift(@_);
-
-    die "Can't open pipe for exec_pipe"
-      unless defined(my $pid = open($handle, '-|'));
-
-    if ($pid) {
-        return $handle;
-    }
-    else {
-        $| = 1;
-        exec("$cmd") or die "Can't exec program: $!";
-    }
-}
-
-# Read a line of text from an open exec_pipe handle
-sub read_pipe_line {
-    my $handle = shift(@_);
-    my $out;
-
-    for (my $i = 0; $i < 100; $i++) {
-        $out = <$handle>;
-        return $out if (defined $out);
-    }
-    return $out;
-}
-
-
-# Prompt user for argument and return response
-sub prompt_user {
-    my $q = shift(@_);
-    print "$q: ";
-    $| = 1;
-    $_ = <STDIN>;
-    chomp;
-    return $_;
-}
-
-
-
-#############################################
-# File update methods
-
-
-
-# Verify that all files in the current source directory
-# are checked in.  dies if any are modified.
-sub verify_precheckin {
-
-    #system ("git pull");
-
-    print "Verifying everything is checked in\n";
-    exec_pipe(*OUT, "git status -s | grep \"^ M\"");
-
-    my $foo = read_pipe_line(*OUT);
-    if ($foo ne "") {
-        print "Files not checked in\n";
-        while ($foo ne "") {
-            print "$foo";
-            $foo = read_pipe_line(*OUT);
-        }
-        die "stopping" unless ($TESTING);
-    }
-    close(OUT);
-
-    print "Verifying everything is pushed\n";
-    exec_pipe(*OUT, "git status -sb | grep \"^##\" | grep \"ahead \"");
-    my $foo = read_pipe_line(*OUT);
-    if ($foo ne "") {
-            print "$foo";
-        print "Files not pushed to remote\n";
-        die "stopping" unless ($TESTING);
-    }
-    close(OUT);
-}
-
-
-
-# update the version in nbproject/project.properties 
-sub update_project_properties {
-
-    my $orig = "project.properties";
-    my $temp = "${orig}-bak";
-
-    print "Updating the version in ${orig}\n";
-    
-    chdir "nbproject" or die "cannot change into nbproject directory";
-    
-
-    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
-    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";
-
-    my $found = 0;
-    while (<CONF_IN>) {
-        if (/^app\.version=/) {
-            print CONF_OUT "app.version=$VER\n";
-            $found++;
-        }
-        else {
-            print CONF_OUT $_;
-        }
-    }
-    close (CONF_IN);
-    close (CONF_OUT);
-
-    if ($found != 1) {
-        die "$found (instead of 1) occurrences of app.version found in ${orig}";
-    }
-
-    unlink ($orig) or die "Error deleting ${orig}";
-    rename ($temp, $orig) or die "Error renaming tmp $orig file";
-    system("git add ${orig}") unless ($TESTING);
-    chdir ".." or die "Error changing directories back to root";
-}
-
-
-
-# update the dev docs
-sub update_doxygen_dev {
-
-    my $orig = "Doxyfile";
-    my $temp = "${orig}-bak";
-
-    print "Updating the version in ${orig} (Dev)\n";
-    
-    chdir "docs/doxygen" or die "cannot change into docs/doxygen directory";
-    
-
-    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
-    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";
-
-    my $found = 0;
-    while (<CONF_IN>) {
-        if (/^PROJECT_NUMBER/) {
-            print CONF_OUT "PROJECT_NUMBER = ${VER}\n";
-            $found++;
-        }
-        elsif (/^HTML_OUTPUT/) {
-            print CONF_OUT "HTML_OUTPUT = api-docs/${VER}/\n";
-            $found++;
-        }     
-        else {
-            print CONF_OUT $_;
-        }
-    }
-    close (CONF_IN);
-    close (CONF_OUT);
-
-    if ($found != 2) {
-        die "$found (instead of 2) occurrences of version found in (DEV) ${orig}";
-    }
-
-    unlink ($orig) or die "Error deleting ${orig}";
-    rename ($temp, $orig) or die "Error renaming tmp $orig file";
-    system("git add ${orig}") unless ($TESTING);
-    chdir "../.." or die "Error changing directories back to root";
-}
-
-
-# update the user docs 
-sub update_doxygen_user {
-
-    my $orig = "Doxyfile";
-    my $temp = "${orig}-bak";
-
-    print "Updating the version in ${orig} (User)\n";
-    
-    chdir "docs/doxygen-user" or die "cannot change into docs/doxygen-user directory";
-    
-
-    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
-    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";
-
-    my $found = 0;
-    while (<CONF_IN>) {
-        if (/^PROJECT_NUMBER/) {
-            print CONF_OUT "PROJECT_NUMBER = ${VER}\n";
-            $found++;
-        }
-        elsif (/^HTML_OUTPUT/) {
-            print CONF_OUT "HTML_OUTPUT = ${VER}\n";
-            $found++;
-        }     
-        else {
-            print CONF_OUT $_;
-        }
-    }
-    close (CONF_IN);
-    close (CONF_OUT);
-
-    if ($found != 2) {
-        die "$found (instead of 2) occurrences of version found in (USER) ${orig}";
-    }
-
-    unlink ($orig) or die "Error deleting ${orig}";
-    rename ($temp, $orig) or die "Error renaming tmp $orig file";
-    system("git add ${orig}") unless ($TESTING);
-    chdir "../.." or die "Error changing directories back to root";
-}
-
-
-main();
\ No newline at end of file
diff --git a/release/update_sleuthkit_version.pl b/release/update_sleuthkit_version.pl
deleted file mode 100755
index e630e4890bb6c0666fea434a4c84fd6f8dbb6454..0000000000000000000000000000000000000000
--- a/release/update_sleuthkit_version.pl
+++ /dev/null
@@ -1,199 +0,0 @@
-#!/usr/bin/perl
-
-# Updates various TSK version numbers 
-# use this when the version of TSK that Autopsy depends on changes
-
-use strict;
-use File::Copy;
-
-# global variables
-my $VER;
-
-my $TESTING = 0;
-print "TESTING MODE (no commits)\n" if ($TESTING);
-
-
-sub main {
-
-	# Get the TSK version argument
-	if (scalar (@ARGV) != 1) {
-	    print stderr "Missing release version argument (i.e.  4.9.0)\n";
-	    exit;
-	}
-	
-	$VER = $ARGV[0];
-	die "Invalid version number: $VER (1.2.3 or 1.2.3b1 expected)" unless ($VER =~ /^\d+\.\d+\.\d+(b\d+)?$/);
-	
-	# Assume we running out of 'release' folder
-	chdir ".." or die "Error changing directories to root";
-	
-	# Update the version info in that tag
-	update_tsk_version();
-	update_core_project_properties();
-	update_core_project_xml();
-	
-	print "Files updated.  You need to commit and push them\n";
-}
-
-
-
-
-
-######################################################
-# Utility functions
-
-
-# Function to execute a command and send output to pipe
-# returns handle
-# exec_pipe(HANDLE, CMD);
-sub exec_pipe {
-    my $handle = shift(@_);
-    my $cmd    = shift(@_);
-
-    die "Can't open pipe for exec_pipe"
-      unless defined(my $pid = open($handle, '-|'));
-
-    if ($pid) {
-        return $handle;
-    }
-    else {
-        $| = 1;
-        exec("$cmd") or die "Can't exec program: $!";
-    }
-}
-
-# Read a line of text from an open exec_pipe handle
-sub read_pipe_line {
-    my $handle = shift(@_);
-    my $out;
-
-    for (my $i = 0; $i < 100; $i++) {
-        $out = <$handle>;
-        return $out if (defined $out);
-    }
-    return $out;
-}
-
-
-
-#############################################
-# File update methods
-
-
-
-# update the tskversion.xml
-sub update_tsk_version {
-
-    my $orig = "TSKVersion.xml";
-    my $temp = "${orig}-bak";
-
-    print "Updating the version in ${orig}\n";
-    
-    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
-    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";
-
-    my $found = 0;
-    while (<CONF_IN>) {
-        if (/name="TSK_VERSION" value=/) {
-            print CONF_OUT "    <property name=\"TSK_VERSION\" value=\"${VER}\"/>\n";
-            $found++;
-        }
-        else {
-            print CONF_OUT $_;
-        }
-    }
-    close (CONF_IN);
-    close (CONF_OUT);
-
-    if ($found != 1) {
-        die "$found (instead of 1) occurrences of TSK_VERSION found in ${orig}";
-    }
-
-    unlink ($orig) or die "Error deleting ${orig}";
-    rename ($temp, $orig) or die "Error renaming tmp $orig file";
-    system("git add ${orig}") unless ($TESTING);
-    
-}
-
-
-
-sub update_core_project_properties {
-
-    my $orig = "project.properties";
-    my $temp = "${orig}-bak";
-
-    print "Updating the version in ${orig}\n";
-    
-    chdir "Core/nbproject" or die "cannot change into Core/nbproject directory";
-    
-
-    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
-    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";
-
-    my $found = 0;
-    while (<CONF_IN>) {
-        if (/^file\.reference\.sleuthkit\-postgresql-/) {
-            print CONF_OUT "file.reference.sleuthkit-postgresql-${VER}.jar=release/modules/ext/sleuthkit-postgresql-${VER}.jar\n";
-            $found++;
-        }
-            
-        else {
-            print CONF_OUT $_;
-        }
-    }
-    close (CONF_IN);
-    close (CONF_OUT);
-
-    if ($found != 1) {
-        die "$found (instead of 1) occurrences of version found in ${orig}";
-    }
-
-    unlink ($orig) or die "Error deleting ${orig}";
-    rename ($temp, $orig) or die "Error renaming tmp $orig file";
-    system("git add ${orig}") unless ($TESTING);
-    chdir "../.." or die "Error changing directories back to root";
-}
-
-sub update_core_project_xml {
-
-    my $orig = "project.xml";
-    my $temp = "${orig}-bak";
-
-    print "Updating the version in ${orig}\n";
-    
-    chdir "Core/nbproject" or die "cannot change into Core/nbproject directory";
-    
-    open (CONF_IN, "<${orig}") or die "Cannot open ${orig}";
-    open (CONF_OUT, ">${temp}") or die "Cannot open ${temp}";
-
-    my $found = 0;
-    while (<CONF_IN>) {
-        if (/<runtime-relative-path>ext\/sleuthkit-postgresql/) {
-            print CONF_OUT "                <runtime-relative-path>ext/sleuthkit-postgresql-${VER}.jar</runtime-relative-path>\n";
-            $found++;
-        }
-        elsif (/<binary-origin>release\/modules\/ext\/sleuthkit-postgresql/) {
-            print CONF_OUT "                <binary-origin>release/modules/ext/sleuthkit-postgresql-${VER}.jar</binary-origin>\n";
-            $found++;
-        }    
-        else {
-            print CONF_OUT $_;
-        }
-    }
-    close (CONF_IN);
-    close (CONF_OUT);
-
-    if ($found != 2) {
-        die "$found (instead of 2) occurrences of version found in ${orig}";
-    }
-
-    unlink ($orig) or die "Error deleting ${orig}";
-    rename ($temp, $orig) or die "Error renaming tmp $orig file";
-    system("git add ${orig}") unless ($TESTING);
-    chdir "../.." or die "Error changing directories back to root";
-}
-
-
-
-
-main();
\ No newline at end of file