diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java
index 6f522406abcb67ef7ee4c666f5ef688f06ce9df0..5279700b588a0d29dae8af762d9f6ede05caf3e3 100644
--- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java
+++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultPanel.java
@@ -40,6 +40,8 @@
 import javax.swing.event.ChangeEvent;
 import javax.swing.event.ChangeListener;
 import org.openide.explorer.ExplorerManager;
+import org.openide.nodes.Children;
+import org.openide.nodes.FilterNode;
 import org.openide.nodes.Node;
 import org.openide.nodes.NodeAdapter;
 import org.openide.nodes.NodeMemberEvent;
@@ -61,9 +63,10 @@
 import org.sleuthkit.autopsy.datamodel.NodeSelectionInfo;
 import org.sleuthkit.autopsy.ingest.IngestManager;
 import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultFetcher;
-import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.HashsetResultFetcher;
+import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.AnalysisResultSetFetcher;
 import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO.KeywordHitResultFetcher;
 import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
+import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam;
 import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactDAO.DataArtifactFetcher;
 import org.sleuthkit.autopsy.mainui.datamodel.DataArtifactSearchParam;
 import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam;
@@ -505,7 +508,11 @@ private void setNode(Node rootNode, boolean fullRefresh) {
             listeningToTabbedPane = true;
         }
 
-        this.currentRootNode = rootNode;
+        // If this is already a search result root node, use it as-is; otherwise,
+        // wrap it in a filter node to ensure no grandchildren are exposed.
+        this.currentRootNode = (rootNode instanceof SearchResultRootNode) 
+                ? rootNode
+                : new ResultViewerFilterParentNode(rootNode);
 
         // if search result node clear out base child factory paging
         if (this.currentRootNode instanceof SearchResultRootNode) {
@@ -1274,24 +1281,25 @@ void displayFileSizes(FileTypeSizeSearchParams fileSizeKey) {
     }
 
     /**
-     * Displays results of querying the DAO for given search parameters query.
+     * Displays results of querying the DAO for the given search parameters
+     * (set name and artifact type).
      *
-     * @param hashHitKey The search parameter query.
+     * @param setKey The search parameter query.
      */
-    void displayHashHits(HashHitSearchParam hashHitKey) {
+    void displayAnalysisResultSet(AnalysisResultSetSearchParam setKey) {
         try {
-            this.searchResultManager = new SearchManager(new HashsetResultFetcher(hashHitKey), getPageSize());
+            this.searchResultManager = new SearchManager(new AnalysisResultSetFetcher(setKey), getPageSize());
             SearchResultsDTO results = searchResultManager.getResults();
             displaySearchResults(results, true);
         } catch (ExecutionException | IllegalArgumentException ex) {
             logger.log(Level.WARNING, MessageFormat.format(
                     "There was an error fetching data for hash set filter: {0} and data source id: {1}.",
-                    hashHitKey.getSetName(),
-                    hashHitKey.getDataSourceId() == null ? "<null>" : hashHitKey.getDataSourceId()),
+                    setKey.getSetName(),
+                    setKey.getDataSourceId() == null ? "<null>" : setKey.getDataSourceId()),
                     ex);
         }
     }
-    
+
     /**
      * Displays results of querying the DAO for the given search parameters
      * query.
@@ -1441,6 +1449,42 @@ private void updatePagingComponents() {
         }
     }
 
+    /**
+     * Children implementation for a result viewer parent node; wraps each
+     * child in a filter node that exposes no children of its own.
+     */
+    private class ResultViewerFilterChildren extends FilterNode.Children {
+
+        /**
+         * Main constructor.
+         *
+         * @param baseNode The parent node to wrap.
+         */
+        ResultViewerFilterChildren(Node baseNode) {
+            super(baseNode == null ? Node.EMPTY : baseNode);
+        }
+
+        @Override
+        protected Node[] createNodes(Node key) {
+            return new Node[]{new FilterNode(key, Children.LEAF)};
+        }
+    }
+
+    /**
+     * A parent node for items shown in the result viewer; wraps its children
+     * so that no grandchildren are displayed.
+     */
+    private class ResultViewerFilterParentNode extends FilterNode {
+
+        /**
+         * Main constructor.
+         * @param original The original node to wrap.
+         */
+        ResultViewerFilterParentNode(Node original) {
+            super(original == null ? Node.EMPTY : original, new ResultViewerFilterChildren(original));
+        }
+    }
+
     /**
      * Listens for updates in page count for a BaseChildFactory.
      */
diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java
index 8845e4ae8bd81962c2214fa62a71d73536920a90..115f009b7d555ee32428ff53aa837a5ef72ba211 100644
--- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java
+++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultTopComponent.java
@@ -45,9 +45,9 @@
 import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
 import org.sleuthkit.autopsy.mainui.datamodel.FileSystemContentSearchParam;
 import org.sleuthkit.autopsy.mainui.datamodel.FileSystemHostSearchParam;
+import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam;
 import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
 import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams;
-import org.sleuthkit.autopsy.mainui.datamodel.HashHitSearchParam;
 import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
 import org.sleuthkit.autopsy.mainui.datamodel.TagsSearchParams;
 
@@ -376,7 +376,7 @@ public void setNode(Node selectedNode) {
     /**
      * Displays results of querying the DAO for analysis results matching the
      * search parameters query.
-     * 
+     *
      * @param analysisResultParams The search parameter query.
      */
     public void displayAnalysisResult(AnalysisResultSearchParam analysisResultParams) {
@@ -392,54 +392,53 @@ public void displayAnalysisResult(AnalysisResultSearchParam analysisResultParams
     public void displayDataArtifact(DataArtifactSearchParam dataArtifactParams) {
         dataResultPanel.displayDataArtifact(dataArtifactParams);
     }
-    
+
     /**
-     * Displays results of querying the DAO for files matching the mime
-     * search parameters query.
-     * 
+     * Displays results of querying the DAO for files matching the mime search
+     * parameters query.
+     *
      * @param fileMimeKey The search parameter query.
      */
     public void displayFileMimes(FileTypeMimeSearchParams fileMimeKey) {
         dataResultPanel.displayFileMimes(fileMimeKey);
     }
-    
+
     /**
-     * Displays results of querying the DAO for files matching the file extension
-     * search parameters query.
+     * Displays results of querying the DAO for files matching the file
+     * extension search parameters query.
      *
      * @param fileExtensionsParams The search parameter query.
      */
     public void displayFileExtensions(FileTypeExtensionsSearchParams fileExtensionsParams) {
         dataResultPanel.displayFileExtensions(fileExtensionsParams);
     }
-    
+
     /**
-     * Displays results of querying the DAO for files matching the file size 
+     * Displays results of querying the DAO for files matching the file size
      * search parameters query.
-     * 
+     *
      * @param fileSizeParams The search parameter query.
      */
     public void displayFileSizes(FileTypeSizeSearchParams fileSizeParams) {
         dataResultPanel.displayFileSizes(fileSizeParams);
-    } 
-    
-    /** Displays results of querying the DAO for hash sets matching the
-     * search parameters query.
-     * 
-     * @param hashHitParams The search parameter query.
+    }
+
+    /**
+     * Displays results of querying the DAO for an artifact type and set name.
+     * @param params The search parameters.
      */
-    public void displayHashHits(HashHitSearchParam hashHitParams) {
-        dataResultPanel.displayHashHits(hashHitParams);
+    public void displayAnalysisResultSet(AnalysisResultSetSearchParam params) {
+        dataResultPanel.displayAnalysisResultSet(params);
     }
-    
+
     /**
-     * Displays results of querying the DAO for keyword hits matching the
-     * search parameters query.
-     * 
+     * Displays results of querying the DAO for keyword hits matching the search
+     * parameters query.
+     *
      * @param keywordParams The search parameter query.
      */
     public void displayKeywordHits(KeywordHitSearchParam keywordParams) {
-       dataResultPanel.displayKeywordHits(keywordParams);
+        dataResultPanel.displayKeywordHits(keywordParams);
     }
     
     /**
diff --git a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java
index 9989078947a762e663db4fd2ae4ac8f5de4fab45..f844adccb8ec30879ee2740d3748d2072bd02cc3 100644
--- a/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java
+++ b/Core/src/org/sleuthkit/autopsy/corecomponents/DataResultViewerTable.java
@@ -1185,8 +1185,7 @@ public void actionPerformed(java.awt.event.ActionEvent evt) {
     })
     private void exportCSVButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_exportCSVButtonActionPerformed
         Node currentRoot = this.getExplorerManager().getRootContext();
-        // GVDTODO disabled for Search Result node
-        if (currentRoot != null && (!(currentRoot instanceof SearchResultRootNode)) && currentRoot.getChildren().getNodesCount() > 0) {
+        if (currentRoot != null && currentRoot.getChildren().getNodesCount() > 0) {
             org.sleuthkit.autopsy.directorytree.ExportCSVAction.saveNodesToCSV(java.util.Arrays.asList(currentRoot.getChildren().getNodes()), this);
         } else {
             MessageNotifyUtil.Message.info(Bundle.DataResultViewerTable_exportCSVButtonActionPerformed_empty());
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED
index 37cbe5f4789becee1c516193bc7ba2fc78ce1eef..9119e5c0bbf7922410c70251d40825ef47f9f024 100755
--- a/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/Bundle.properties-MERGED
@@ -124,19 +124,12 @@ FileNode.getActions.openInExtViewer.text=Open in External Viewer  Ctrl+E
 FileNode.getActions.searchFilesSameMD5.text=Search for files with the same MD5 hash
 FileNode.getActions.viewFileInDir.text=View File in Directory
 FileNode.getActions.viewInNewWin.text=View Item in New Window
-FileSizeNode_counting_placeholder=\ (counting...)
 FileTypes.bgCounting.placeholder=\ (counting...)
 FileTypes.createSheet.name.desc=no description
 FileTypes.createSheet.name.displayName=Name
 FileTypes.createSheet.name.name=Name
 FileTypes.name.text=File Types
 FileTypesByMimeType.name.text=By MIME Type
-FileTypesByMimeTypeNode.createSheet.mediaSubtype.desc=no description
-FileTypesByMimeTypeNode.createSheet.mediaSubtype.displayName=Subtype
-FileTypesByMimeTypeNode.createSheet.mediaSubtype.name=Subtype
-FileTypesByMimeTypeNode.createSheet.mediaType.desc=no description
-FileTypesByMimeTypeNode.createSheet.mediaType.displayName=Type
-FileTypesByMimeTypeNode.createSheet.mediaType.name=Type
 GetSCOTask.occurrences.defaultDescription=No correlation properties found
 GetSCOTask.occurrences.multipleProperties=Multiple different correlation properties exist for this result
 HostGroupingNode_unknownHostNode_title=Unknown Host
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java b/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java
index ea97ef0edd3a7aad87a180f80a14152aaca117b6..ab84647dd4620434a19fcd82a13344361381609d 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/DisplayableItemNodeVisitor.java
@@ -27,11 +27,11 @@
 import org.sleuthkit.autopsy.commonpropertiessearch.InstanceDataSourceNode;
 import org.sleuthkit.autopsy.datamodel.DeletedContent.DeletedContentsChildren.DeletedContentNode;
 import org.sleuthkit.autopsy.datamodel.DeletedContent.DeletedContentsNode;
-import org.sleuthkit.autopsy.datamodel.FileSize.FileSizeRootChildren.FileSizeNode;
 import org.sleuthkit.autopsy.datamodel.FileSize.FileSizeRootNode;
 import org.sleuthkit.autopsy.datamodel.FileTypes.FileTypesNode;
 import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
 import org.sleuthkit.autopsy.allcasessearch.CorrelationAttributeInstanceNode;
+import org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode;
 
 /**
  * Visitor pattern that goes over all nodes in the directory tree. This includes
@@ -72,17 +72,13 @@ public interface DisplayableItemNodeVisitor<T> {
 
     T visit(DataSourceGroupingNode dataSourceGroupingNode);
 
-    T visit(org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileExtensionNode fsfn);
-
     T visit(DeletedContentNode dcn);
 
     T visit(DeletedContentsNode dcn);
 
     T visit(FileSizeRootNode fsrn);
 
-    T visit(FileSizeNode fsn);
-
-    T visit(org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode sfn);
+    T visit(FileTypesByExtNode sfn);
 
     T visit(RecentFilesNode rfn);
 
@@ -172,10 +168,6 @@ public interface DisplayableItemNodeVisitor<T> {
 
     T visit(FileTypesByMimeType.ByMimeTypeNode ftByMimeTypeNode);
 
-    T visit(FileTypesByMimeType.MediaTypeNode ftByMimeTypeMediaType);
-
-    T visit(FileTypesByMimeType.MediaSubTypeNode ftByMimeTypeMediaSubType);
-
     T visit(EmptyNode.MessageNode emptyNode);
 
     /*
@@ -302,26 +294,11 @@ public T visit(Artifacts.BaseArtifactNode ecn) {
             return defaultVisit(ecn);
         }
 
-        @Override
-        public T visit(org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileExtensionNode fsfn) {
-            return defaultVisit(fsfn);
-        }
-
         @Override
         public T visit(FileTypesByMimeType.ByMimeTypeNode ftByMimeTypeNode) {
             return defaultVisit(ftByMimeTypeNode);
         }
 
-        @Override
-        public T visit(FileTypesByMimeType.MediaTypeNode ftByMimeTypeMediaTypeNode) {
-            return defaultVisit(ftByMimeTypeMediaTypeNode);
-        }
-
-        @Override
-        public T visit(FileTypesByMimeType.MediaSubTypeNode ftByMimeTypeMediaTypeNode) {
-            return defaultVisit(ftByMimeTypeMediaTypeNode);
-        }
-
         @Override
         public T visit(EmptyNode.MessageNode ftByMimeTypeEmptyNode) {
             return defaultVisit(ftByMimeTypeEmptyNode);
@@ -342,11 +319,6 @@ public T visit(FileSizeRootNode fsrn) {
             return defaultVisit(fsrn);
         }
 
-        @Override
-        public T visit(FileSizeNode fsn) {
-            return defaultVisit(fsn);
-        }
-
         @Override
         public T visit(org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode sfn) {
             return defaultVisit(sfn);
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/FileSize.java b/Core/src/org/sleuthkit/autopsy/datamodel/FileSize.java
index f7faa4a7dba18055442af8211b4876c382d5d83e..950dbd896442f540a119a037873637cc16a589cf 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/FileSize.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/FileSize.java
@@ -18,37 +18,13 @@
  */
 package org.sleuthkit.autopsy.datamodel;
 
-import java.beans.PropertyChangeEvent;
-import java.beans.PropertyChangeListener;
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Observable;
-import java.util.Observer;
-import java.util.Set;
-import java.util.concurrent.ExecutionException;
-import java.util.logging.Level;
-import javax.swing.SwingWorker;
-import org.openide.nodes.ChildFactory;
 import org.openide.nodes.Children;
 import org.openide.nodes.Node;
 import org.openide.nodes.Sheet;
 import org.openide.util.NbBundle;
-import org.openide.util.NbBundle.Messages;
 import org.openide.util.lookup.Lookups;
-import org.sleuthkit.autopsy.casemodule.Case;
-import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.core.UserPreferences;
-import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.ingest.IngestManager;
-import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
-import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams;
-import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams.FileSizeFilter;
-import org.sleuthkit.autopsy.mainui.nodes.SelectionResponder;
-import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.datamodel.TskData;
+import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory.FileSizeTypeFactory;
 
 /**
  * Files by Size View node and related child nodes
@@ -57,15 +33,9 @@ public class FileSize implements AutopsyVisitableItem {
 
     private static final Logger logger = Logger.getLogger(FileTypes.class.getName());
 
-    private SleuthkitCase skCase;
     private final long filteringDSObjId; // 0 if not filtering/grouping by data source
 
-    public FileSize(SleuthkitCase skCase) {
-        this(skCase, 0);
-    }
-
-    public FileSize(SleuthkitCase skCase, long dsObjId) {
-        this.skCase = skCase;
+    public FileSize(long dsObjId) {
         this.filteringDSObjId = dsObjId;
     }
 
@@ -74,10 +44,6 @@ public <T> T accept(AutopsyItemVisitor<T> visitor) {
         return visitor.visit(this);
     }
 
-    public SleuthkitCase getSleuthkitCase() {
-        return this.skCase;
-    }
-
     long filteringDataSourceObjId() {
         return this.filteringDSObjId;
     }
@@ -88,14 +54,21 @@ long filteringDataSourceObjId() {
     public static class FileSizeRootNode extends DisplayableItemNode {
 
         private static final String NAME = NbBundle.getMessage(FileSize.class, "FileSize.fileSizeRootNode.name");
+        
+        private final long dataSourceObjId;
 
-        FileSizeRootNode(SleuthkitCase skCase, long datasourceObjId) {
-            super(Children.create(new FileSizeRootChildren(skCase, datasourceObjId), true), Lookups.singleton(NAME));
+        FileSizeRootNode(long datasourceObjId) {
+            super(Children.create(new FileSizeTypeFactory(datasourceObjId > 0 ? datasourceObjId : null), true), Lookups.singleton(NAME));
             super.setName(NAME);
             super.setDisplayName(NAME);
             this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-size-16.png"); //NON-NLS
+            this.dataSourceObjId = datasourceObjId;
         }
 
+        public Node clone() {
+            return new FileSizeRootNode(this.dataSourceObjId);
+        }
+        
         @Override
         public boolean isLeafTypeNode() {
             return false;
@@ -127,277 +100,4 @@ public String getItemType() {
             return getClass().getName();
         }
     }
-
-    /*
-     * Makes the children for specific sizes
-     */
-    public static class FileSizeRootChildren extends ChildFactory<FileSizeFilter> {
-
-        private SleuthkitCase skCase;
-        private final long datasourceObjId;
-        private Observable notifier;
-
-        public FileSizeRootChildren(SleuthkitCase skCase, long datasourceObjId) {
-            this.skCase = skCase;
-            this.datasourceObjId = datasourceObjId;
-            notifier = new FileSizeRootChildrenObservable();
-        }
-
-        /**
-         * Listens for case and ingest invest. Updates observers when events are
-         * fired. Size-based nodes are listening to this for changes.
-         */
-        private static final class FileSizeRootChildrenObservable extends Observable {
-
-            private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(Case.Events.DATA_SOURCE_ADDED, Case.Events.CURRENT_CASE);
-            private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
-            private static final Set<IngestManager.IngestModuleEvent> INGEST_MODULE_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestModuleEvent.CONTENT_CHANGED);
-
-            FileSizeRootChildrenObservable() {
-                IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, pcl);
-                IngestManager.getInstance().addIngestModuleEventListener(INGEST_MODULE_EVENTS_OF_INTEREST, pcl);
-                Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
-            }
-
-            private void removeListeners() {
-                deleteObservers();
-                IngestManager.getInstance().removeIngestJobEventListener(pcl);
-                IngestManager.getInstance().removeIngestModuleEventListener(pcl);
-                Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
-            }
-
-            private final PropertyChangeListener pcl = (PropertyChangeEvent evt) -> {
-                String eventType = evt.getPropertyName();
-
-                if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
-                    /**
-                     * Checking for a current case is a stop gap measure until a
-                     * different way of handling the closing of cases is worked
-                     * out. Currently, remote events may be received for a case
-                     * that is already closed.
-                     */
-                    try {
-                        // new file was added
-                        // @@@ could check the size here and only fire off updates if we know the file meets the min size criteria
-                        Case.getCurrentCaseThrows();
-                        update();
-                    } catch (NoCurrentCaseException notUsed) {
-                        /**
-                         * Case is closed, do nothing.
-                         */
-                    }
-                } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
-                        || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())
-                        || eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) {
-                    /**
-                     * Checking for a current case is a stop gap measure until a
-                     * different way of handling the closing of cases is worked
-                     * out. Currently, remote events may be received for a case
-                     * that is already closed.
-                     */
-                    try {
-                        Case.getCurrentCaseThrows();
-                        update();
-                    } catch (NoCurrentCaseException notUsed) {
-                        /**
-                         * Case is closed, do nothing.
-                         */
-                    }
-                } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
-                    // case was closed. Remove listeners so that we don't get called with a stale case handle
-                    if (evt.getNewValue() == null) {
-                        removeListeners();
-                    }
-                }
-            };
-
-            private void update() {
-                setChanged();
-                notifyObservers();
-            }
-        }
-
-        @Override
-        protected boolean createKeys(List<FileSizeFilter> list) {
-            list.addAll(Arrays.asList(FileSizeFilter.values()));
-            return true;
-        }
-
-        @Override
-        protected Node createNodeForKey(FileSizeFilter key) {
-            return new FileSizeNode(skCase, key, notifier, datasourceObjId);
-        }
-
-        /*
-         * Node for a specific size range. Children are files.
-         */
-        public class FileSizeNode extends DisplayableItemNode implements SelectionResponder{
-
-            private final FileSizeFilter filter;
-            private final long datasourceObjId;
-            private long childCount = -1;
-            private final SleuthkitCase skCase;
-
-            // use version with observer instead so that it updates
-            @Deprecated
-            FileSizeNode(SleuthkitCase skCase, FileSizeFilter filter, long datasourceObjId) {
-                super(Children.LEAF,
-                        Lookups.fixed(filter.getDisplayName(),
-                                new FileTypeSizeSearchParams(
-                                        filter,
-                                        datasourceObjId > 0 ? datasourceObjId : null)));
-                this.filter = filter;
-                this.datasourceObjId = datasourceObjId;
-                this.skCase = skCase;
-                init();
-            }
-
-            /**
-             *
-             * @param skCase
-             * @param filter
-             * @param o               Observable that provides updates when
-             *                        events are fired
-             * @param datasourceObjId filter by data source, if configured in
-             *                        user preferences
-             */
-            FileSizeNode(SleuthkitCase skCase, FileSizeFilter filter, Observable o, long datasourceObjId) {
-                super(Children.LEAF,
-                        Lookups.fixed(filter.getDisplayName()));
-                this.filter = filter;
-                this.datasourceObjId = datasourceObjId;
-                this.skCase = skCase;
-                init();
-                o.addObserver(new FileSizeNodeObserver());
-            }
-            
-            @Override
-            public void respondSelection(DataResultTopComponent dataResultPanel) {
-                dataResultPanel.displayFileSizes(new FileTypeSizeSearchParams(
-                                        filter,
-                                        datasourceObjId > 0 ? datasourceObjId : null));
-            }
-
-            private void init() {
-                super.setName(filter.getName());
-
-                String tooltip = filter.getDisplayName();
-                this.setShortDescription(tooltip);
-                this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-size-16.png"); //NON-NLS
-
-                updateDisplayName();
-            }
-
-            @Override
-            public String getItemType() {
-                /**
-                 * Return getClass().getName() + filter.getName() if custom
-                 * settings are desired for different filters.
-                 */
-                return DisplayableItemNode.FILE_PARENT_NODE_KEY;
-            }
-
-            // update the display name when new events are fired
-            private class FileSizeNodeObserver implements Observer {
-
-                @Override
-                public void update(Observable o, Object arg) {
-                    updateDisplayName();
-                }
-            }
-
-            @Override
-            public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
-                return visitor.visit(this);
-            }
-
-            @Override
-            protected Sheet createSheet() {
-                Sheet sheet = super.createSheet();
-                Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
-                if (sheetSet == null) {
-                    sheetSet = Sheet.createPropertiesSet();
-                    sheet.put(sheetSet);
-                }
-
-                sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileSize.createSheet.filterType.name"),
-                        NbBundle.getMessage(this.getClass(), "FileSize.createSheet.filterType.displayName"),
-                        NbBundle.getMessage(this.getClass(), "FileSize.createSheet.filterType.desc"),
-                        filter.getDisplayName()));
-
-                return sheet;
-            }
-
-            @Override
-            public boolean isLeafTypeNode() {
-                return true;
-            }
-
-            private long calculateChildCount() {
-                try {
-                    return skCase.countFilesWhere(makeQuery(filter, datasourceObjId));
-                } catch (TskCoreException ex) {
-                    logger.log(Level.SEVERE, "Error getting files by size search view count", ex); //NON-NLS
-                    return 0;
-                }
-            }
-
-            @Messages("FileSizeNode_counting_placeholder= (counting...)")
-            private void updateDisplayName() {
-                //only show "(counting...)" the first time, otherwise it is distracting.
-                setDisplayName(filter.getDisplayName() + ((childCount < 0) ? Bundle.FileTypes_bgCounting_placeholder()
-                        : (" (" + childCount + ")"))); //NON-NLS
-                new SwingWorker<Long, Void>() {
-                    @Override
-                    protected Long doInBackground() throws Exception {
-                        return calculateChildCount();
-                    }
-
-                    @Override
-                    protected void done() {
-                        try {
-                            childCount = get();
-                            setDisplayName(filter.getDisplayName() + " (" + childCount + ")"); //NON-NLS
-                        } catch (InterruptedException | ExecutionException ex) {
-                            setDisplayName(filter.getDisplayName());
-                            logger.log(Level.WARNING, "Failed to get count of files for " + filter.getDisplayName(), ex); //NON-NLS
-                        }
-                    }
-                }.execute();
-
-            }
-
-            private String makeQuery(FileSizeFilter filter, long filteringDSObjId) {
-                String query;
-                switch (filter) {
-                    case SIZE_50_200:
-                        query = "(size >= 50000000 AND size < 200000000)"; //NON-NLS
-                        break;
-                    case SIZE_200_1000:
-                        query = "(size >= 200000000 AND size < 1000000000)"; //NON-NLS
-                        break;
-
-                    case SIZE_1000_:
-                        query = "(size >= 1000000000)"; //NON-NLS
-                        break;
-
-                    default:
-                        throw new IllegalArgumentException("Unsupported filter type to get files by size: " + filter); //NON-NLS
-                }
-
-                // Ignore unallocated block files.
-                query = query + " AND (type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType() + ")"; //NON-NLS
-
-                // hide known files if specified by configuration
-                query += (UserPreferences.hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")") : ""); //NON-NLS
-
-                // filter by datasource if indicated in case preferences
-                if (filteringDSObjId > 0) {
-                    query += " AND data_source_obj_id = " + filteringDSObjId;
-                }
-
-                return query;
-            }
-        }
-    }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByExtension.java b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByExtension.java
index 6535be734002d7f7ac8d5cbeaff5d69e162ead5b..4bf820762ab22a6a0c207d1d3fbeb1e56d69eb4d 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByExtension.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByExtension.java
@@ -18,221 +18,49 @@
  */
 package org.sleuthkit.autopsy.datamodel;
 
-import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams;
-import java.beans.PropertyChangeEvent;
-import java.beans.PropertyChangeListener;
-import java.util.Arrays;
-import java.util.EnumSet;
-import java.util.List;
-import java.util.Observable;
-import java.util.Set;
-import java.util.stream.Collectors;
-import org.apache.commons.lang.StringUtils;
-import org.openide.nodes.ChildFactory;
 import org.openide.nodes.Children;
 import org.openide.nodes.Node;
 import org.openide.nodes.Sheet;
 import org.openide.util.NbBundle;
 import org.openide.util.lookup.Lookups;
-import org.sleuthkit.autopsy.casemodule.Case;
-import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import org.sleuthkit.autopsy.core.UserPreferences;
-import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
-import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.mainui.datamodel.FileExtDocumentFilter;
-import org.sleuthkit.autopsy.mainui.datamodel.FileExtExecutableFilter;
-import org.sleuthkit.autopsy.mainui.datamodel.FileExtRootFilter;
-import org.sleuthkit.autopsy.ingest.IngestManager;
-import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
-import org.sleuthkit.datamodel.AbstractFile;
-import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.datamodel.TskData;
-import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
-import org.sleuthkit.autopsy.mainui.datamodel.FileExtSearchFilter;
-import org.sleuthkit.autopsy.mainui.nodes.SelectionResponder;
+import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory;
 
 /**
  * Filters database results by file extension.
  */
 public final class FileTypesByExtension implements AutopsyVisitableItem {
 
-    private final static Logger logger = Logger.getLogger(FileTypesByExtension.class.getName());
-    private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
     private final FileTypes typesRoot;
 
     public FileTypesByExtension(FileTypes typesRoot) {
         this.typesRoot = typesRoot;
     }
 
-    public SleuthkitCase getSleuthkitCase() {
-        try {
-            return Case.getCurrentCaseThrows().getSleuthkitCase();
-        } catch (NoCurrentCaseException ex) {
-            return null;
-        }
-    }
-
     @Override
     public <T> T accept(AutopsyItemVisitor<T> visitor) {
         return visitor.visit(this);
     }
 
     long filteringDataSourceObjId() {
-        return typesRoot.filteringDataSourceObjId();
-    }
-
-    /**
-     * Listens for case and ingest invest. Updates observers when events are
-     * fired. FileType and FileTypes nodes are all listening to this.
-     */
-    private class FileTypesByExtObservable extends Observable implements RefreshThrottler.Refresher {
-
-        private final PropertyChangeListener pcl;
-        private final Set<Case.Events> CASE_EVENTS_OF_INTEREST;
-        /**
-         * RefreshThrottler is used to limit the number of refreshes performed
-         * when CONTENT_CHANGED and DATA_ADDED ingest module events are
-         * received.
-         */
-        private final RefreshThrottler refreshThrottler = new RefreshThrottler(this);
-
-        private FileTypesByExtObservable() {
-            super();
-            this.CASE_EVENTS_OF_INTEREST = EnumSet.of(Case.Events.DATA_SOURCE_ADDED, Case.Events.CURRENT_CASE);
-            this.pcl = (PropertyChangeEvent evt) -> {
-                String eventType = evt.getPropertyName();
-                if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
-                        || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())
-                        || eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) {
-
-                    /**
-                     * Checking for a current case is a stop gap measure until a
-                     * different way of handling the closing of cases is worked
-                     * out. Currently, remote events may be received for a case
-                     * that is already closed.
-                     */
-                    try {
-                        Case.getCurrentCaseThrows();
-                        typesRoot.updateShowCounts();
-                        update();
-                    } catch (NoCurrentCaseException notUsed) {
-                        /**
-                         * Case is closed, do nothing.
-                         */
-                    }
-                } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
-                    // case was closed. Remove listeners so that we don't get called with a stale case handle
-                    if (evt.getNewValue() == null) {
-                        removeListeners();
-                    }
-                }
-            };
-
-            IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, pcl);
-            refreshThrottler.registerForIngestModuleEvents();
-            Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
-        }
-
-        private void removeListeners() {
-            deleteObservers();
-            IngestManager.getInstance().removeIngestJobEventListener(pcl);
-            refreshThrottler.unregisterEventListener();
-            Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
-        }
-
-        private void update() {
-            setChanged();
-            notifyObservers();
-        }
-
-        @Override
-        public void refresh() {
-            typesRoot.updateShowCounts();
-            update();
-        }
-
-        @Override
-        public boolean isRefreshRequired(PropertyChangeEvent evt) {
-            String eventType = evt.getPropertyName();
-            if (eventType.equals(IngestManager.IngestModuleEvent.CONTENT_CHANGED.toString())) {
-
-                /**
-                 * Checking for a current case is a stop gap measure until a
-                 * different way of handling the closing of cases is worked out.
-                 * Currently, remote events may be received for a case that is
-                 * already closed.
-                 */
-                try {
-                    Case.getCurrentCaseThrows();
-                    /**
-                     * If a new file has been added but does not have an
-                     * extension there is nothing to do.
-                     */
-                    if ((evt.getOldValue() instanceof ModuleContentEvent) == false) {
-                        return false;
-                    }
-                    ModuleContentEvent moduleContentEvent = (ModuleContentEvent) evt.getOldValue();
-                    if ((moduleContentEvent.getSource() instanceof AbstractFile) == false) {
-                        return false;
-                    }
-                    AbstractFile abstractFile = (AbstractFile) moduleContentEvent.getSource();
-                    if (!abstractFile.getNameExtension().isEmpty()) {
-                        return true;
-                    }
-                } catch (NoCurrentCaseException ex) {
-                    /**
-                     * Case is closed, no refresh needed.
-                     */
-                    return false;
-                }
-            }
-            return false;
-        }
+        return this.typesRoot.filteringDataSourceObjId();
     }
 
-    private static final String FNAME = NbBundle.getMessage(FileTypesByExtNode.class, "FileTypesByExtNode.fname.text");
+    public static class FileTypesByExtNode extends DisplayableItemNode {
 
-    /**
-     * Node for root of file types view. Children are nodes for specific types.
-     */
-    class FileTypesByExtNode extends DisplayableItemNode {
+        private static final String FNAME = NbBundle.getMessage(FileTypesByExtNode.class, "FileTypesByExtNode.fname.text");
 
-        private final FileExtRootFilter filter;
+        private final long dataSourceId;
 
-        /**
-         *
-         * @param skCase
-         * @param filter null to display root node of file type tree, pass in
-         *               something to provide a sub-node.
-         */
-        FileTypesByExtNode(SleuthkitCase skCase, FileExtRootFilter filter) {
-            this(skCase, filter, null);
-        }
-
-        /**
-         *
-         * @param skCase
-         * @param filter
-         * @param o      Observable that was created by a higher-level node that
-         *               provides updates on events
-         */
-        private FileTypesByExtNode(SleuthkitCase skCase, FileExtRootFilter filter, FileTypesByExtObservable o) {
-
-            super(Children.create(new FileTypesByExtNodeChildren(skCase, filter, o), true),
-                    Lookups.singleton(filter == null ? FNAME : filter.getDisplayName()));
-            this.filter = filter;
-
-            // root node of tree
-            if (filter == null) {
-                super.setName(FNAME);
-                super.setDisplayName(FNAME);
-            } // sub-node in file tree (i.e. documents, exec, etc.)
-            else {
-                super.setName(filter.getDisplayName());
-                super.setDisplayName(filter.getDisplayName());
-            }
+        FileTypesByExtNode(long dataSourceId) {
+            super(Children.create(new ViewsTypeFactory.FileExtFactory(dataSourceId > 0 ? dataSourceId : null), true), Lookups.singleton(FNAME));
+            this.dataSourceId = dataSourceId;
+            super.setName(FNAME);
+            super.setDisplayName(FNAME);
             this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file_types.png"); //NON-NLS
+        } //NON-NLS
+
+        public Node clone() {
+            return new FileTypesByExtNode(dataSourceId);
         }
 
         @Override
@@ -253,194 +81,13 @@ protected Sheet createSheet() {
                 sheetSet = Sheet.createPropertiesSet();
                 sheet.put(sheetSet);
             }
-            if (filter != null && (filter.equals(FileExtRootFilter.TSK_DOCUMENT_FILTER) || filter.equals(FileExtRootFilter.TSK_EXECUTABLE_FILTER))) {
-                String extensions = "";
-                for (String ext : filter.getFilter()) {
-                    extensions += "'" + ext + "', ";
-                }
-                extensions = extensions.substring(0, extensions.lastIndexOf(','));
-                sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.fileExt.name"), NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.fileExt.displayName"), NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.fileExt.desc"), extensions));
-            } else {
-                sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.name"), NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.displayName"), NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.desc"), getDisplayName()));
-            }
+            sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.name"), NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.displayName"), NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.name.desc"), getDisplayName()));
             return sheet;
         }
 
         @Override
         public String getItemType() {
-            /**
-             * Because Documents and Executable are further expandable, their
-             * column order settings should be stored separately.
-             */
-            if (filter == null) {
-                return getClass().getName();
-            }
-            if (filter.equals(FileExtRootFilter.TSK_DOCUMENT_FILTER) || filter.equals(FileExtRootFilter.TSK_EXECUTABLE_FILTER)) {
-                return getClass().getName() + filter.getName();
-            }
             return getClass().getName();
         }
-
-    }
-
-    private class FileTypesByExtNodeChildren extends ChildFactory<FileExtSearchFilter> {
-
-        private final SleuthkitCase skCase;
-        private final FileExtRootFilter filter;
-        private final FileTypesByExtObservable notifier;
-
-        /**
-         *
-         * @param skCase
-         * @param filter Is null for root node
-         * @param o      Observable that provides updates based on events being
-         *               fired (or null if one needs to be created)
-         */
-        private FileTypesByExtNodeChildren(SleuthkitCase skCase, FileExtRootFilter filter, FileTypesByExtObservable o) {
-            super();
-            this.skCase = skCase;
-            this.filter = filter;
-            if (o == null) {
-                this.notifier = new FileTypesByExtObservable();
-            } else {
-                this.notifier = o;
-            }
-        }
-
-        @Override
-        protected boolean createKeys(List<FileExtSearchFilter> list) {
-            // root node
-            if (filter == null) {
-                list.addAll(Arrays.asList(FileExtRootFilter.values()));
-            } // document and executable has another level of nodes
-            else if (filter.equals(FileExtRootFilter.TSK_DOCUMENT_FILTER)) {
-                list.addAll(Arrays.asList(FileExtDocumentFilter.values()));
-            } else if (filter.equals(FileExtRootFilter.TSK_EXECUTABLE_FILTER)) {
-                list.addAll(Arrays.asList(FileExtExecutableFilter.values()));
-            }
-            return true;
-        }
-
-        @Override
-        protected Node createNodeForKey(FileExtSearchFilter key) {
-            // make new nodes for the sub-nodes
-            if (key.getName().equals(FileExtRootFilter.TSK_DOCUMENT_FILTER.getName())) {
-                return new FileTypesByExtNode(skCase, FileExtRootFilter.TSK_DOCUMENT_FILTER, notifier);
-            } else if (key.getName().equals(FileExtRootFilter.TSK_EXECUTABLE_FILTER.getName())) {
-                return new FileTypesByExtNode(skCase, FileExtRootFilter.TSK_EXECUTABLE_FILTER, notifier);
-            } else {
-                return new FileExtensionNode(key, skCase, notifier);
-            }
-        }
-    }
-
-    /**
-     * Node for a specific file type / extension. Children of it will be the
-     * files of that type.
-     */
-    final class FileExtensionNode extends FileTypes.BGCountUpdatingNode implements SelectionResponder {
-
-        private final FileExtSearchFilter filter;
-
-        /**
-         *
-         * @param filter Extensions that will be shown for this node
-         * @param skCase
-         * @param o      Observable that sends updates when the child factories
-         *               should refresh
-         */
-        FileExtensionNode(FileExtSearchFilter filter, SleuthkitCase skCase, FileTypesByExtObservable o) {
-            super(typesRoot, Children.LEAF,
-                    Lookups.fixed(filter.getDisplayName()));
-            
-            this.filter = filter;
-            super.setName(filter.getDisplayName());
-            updateDisplayName();
-            this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-filter-icon.png"); //NON-NLS
-
-            o.addObserver(this);
-        }
-        
-        @Override
-        public void respondSelection(DataResultTopComponent dataResultPanel) {
-            dataResultPanel.displayFileExtensions(new FileTypeExtensionsSearchParams(
-                    filter,
-                    filteringDataSourceObjId() > 0 ? filteringDataSourceObjId() : null));
-        }
-
-        @Override
-        public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
-            return visitor.visit(this);
-        }
-
-        @Override
-        protected Sheet createSheet() {
-            Sheet sheet = super.createSheet();
-            Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
-            if (sheetSet == null) {
-                sheetSet = Sheet.createPropertiesSet();
-                sheet.put(sheetSet);
-            }
-            sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.filterType.name"),
-                    NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.filterType.displayName"),
-                    NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.filterType.desc"),
-                    filter.getDisplayName()));
-
-            sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.fileExt.name"),
-                    NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.fileExt.displayName"),
-                    NbBundle.getMessage(this.getClass(), "FileTypesByExtNode.createSheet.fileExt.desc"),
-                    String.join(", ", filter.getFilter())));
-            return sheet;
-        }
-
-        @Override
-        public boolean isLeafTypeNode() {
-            return true;
-        }
-
-        /**
-         * Consider allowing different configurations for Images, Videos, etc
-         * (in which case we'd return getClass().getName() + filter.getName()
-         * for all filters).
-         */
-        @Override
-        public String getItemType() {
-            return DisplayableItemNode.FILE_PARENT_NODE_KEY;
-        }
-
-        @Override
-        String getDisplayNameBase() {
-            return filter.getDisplayName();
-        }
-
-        @Override
-        long calculateChildCount() throws TskCoreException {
-            try {
-                return Case.getCurrentCaseThrows().getSleuthkitCase().countFilesWhere(createQuery(filter));
-            } catch (NoCurrentCaseException ex) {
-                throw new TskCoreException("No open case.", ex);
-            }
-        }
-    }
-
-    private String createQuery(FileExtSearchFilter filter) {
-        if (filter.getFilter().isEmpty()) {
-            // We should never be given a search filter without extensions
-            // but if we are it is clearly a programming error so we throw 
-            // an IllegalArgumentException.
-            throw new IllegalArgumentException("Empty filter list passed to createQuery()"); // NON-NLS
-        }
-
-        return "(dir_type = " + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")"
-                + (UserPreferences.hideKnownFilesInViewsTree()
-                ? " AND (known IS NULL OR known != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")"
-                : " ")
-                + (filteringDataSourceObjId() > 0
-                        ? " AND data_source_obj_id = " + filteringDataSourceObjId()
-                        : " ")
-                + " AND (extension IN (" + filter.getFilter().stream()
-                        .map(String::toLowerCase)
-                        .map(s -> "'" + StringUtils.substringAfter(s, ".") + "'")
-                        .collect(Collectors.joining(", ")) + "))";
     }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByMimeType.java b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByMimeType.java
index 8d626d8e13b36eb2ec7e2955e3a1d4464b2b3703..5bcec936c6f941acaddcb6e66d8211847a057da7 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByMimeType.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/FileTypesByMimeType.java
@@ -18,40 +18,13 @@
  */
 package org.sleuthkit.autopsy.datamodel;
 
-import java.beans.PropertyChangeEvent;
-import java.beans.PropertyChangeListener;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
 import java.util.Observable;
-import java.util.Observer;
-import java.util.Set;
-import java.util.logging.Level;
-import org.apache.commons.lang3.StringUtils;
-import org.openide.nodes.ChildFactory;
 import org.openide.nodes.Children;
 import org.openide.nodes.Node;
-import org.openide.nodes.Sheet;
 import org.openide.util.NbBundle;
 import org.openide.util.lookup.Lookups;
-import org.sleuthkit.autopsy.casemodule.Case;
-import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
-import static org.sleuthkit.autopsy.core.UserPreferences.hideKnownFilesInViewsTree;
-import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTree;
-import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
 import org.sleuthkit.autopsy.coreutils.Logger;
-import org.sleuthkit.autopsy.ingest.IngestManager;
-import org.sleuthkit.datamodel.SleuthkitCase;
-import org.sleuthkit.datamodel.TskCoreException;
-import org.sleuthkit.datamodel.TskData;
-import org.sleuthkit.autopsy.guiutils.RefreshThrottler;
-import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
-import org.sleuthkit.autopsy.mainui.nodes.SelectionResponder;
+import org.sleuthkit.autopsy.mainui.nodes.ViewsTypeFactory.FileMimePrefixFactory;
 
 /**
  * Class which contains the Nodes for the 'By Mime Type' view located in the
@@ -63,119 +36,15 @@
 public final class FileTypesByMimeType extends Observable implements AutopsyVisitableItem {
 
     private final static Logger logger = Logger.getLogger(FileTypesByMimeType.class.getName());
-    private static final Set<IngestManager.IngestJobEvent> INGEST_JOB_EVENTS_OF_INTEREST = EnumSet.of(IngestManager.IngestJobEvent.COMPLETED, IngestManager.IngestJobEvent.CANCELLED);
-
-    /**
-     * The nodes of this tree will be determined dynamically by the mimetypes
-     * which exist in the database. This hashmap will store them with the media
-     * type as the key and a Map, from media subtype to count, as the value.
-     */
-    private final HashMap<String, Map<String, Long>> existingMimeTypeCounts = new HashMap<>();
+ 
     /**
      * Root of the File Types tree. Used to provide single answer to question:
      * Should the child counts be shown next to the nodes?
      */
     private final FileTypes typesRoot;
 
-    /**
-     * The pcl is in the class because it has the easiest mechanisms to add and
-     * remove itself during its life cycles.
-     */
-    private final PropertyChangeListener pcl;
-
-    private static final Set<Case.Events> CASE_EVENTS_OF_INTEREST = EnumSet.of(Case.Events.DATA_SOURCE_ADDED, Case.Events.CURRENT_CASE);
-
-    /**
-     * RefreshThrottler is used to limit the number of refreshes performed when
-     * CONTENT_CHANGED and DATA_ADDED ingest module events are received.
-     */
-    private final RefreshThrottler refreshThrottler;
-
-    /**
-     * Create the base expression used as the where clause in the queries for
-     * files by mime type. Filters out certain kinds of files and directories,
-     * and known/slack files based on user preferences.
-     *
-     * @return The base expression to be used in the where clause of queries for
-     * files by mime type.
-     */
-    private String createBaseWhereExpr() {
-        return "(dir_type = " + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")"
-                + " AND (type IN ("
-                + TskData.TSK_DB_FILES_TYPE_ENUM.FS.ordinal() + ","
-                + TskData.TSK_DB_FILES_TYPE_ENUM.CARVED.ordinal() + ","
-                + TskData.TSK_DB_FILES_TYPE_ENUM.DERIVED.ordinal() + ","
-                + TskData.TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE.ordinal() + ","
-                + TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL.ordinal()
-                + (hideSlackFilesInViewsTree() ? "" : ("," + TskData.TSK_DB_FILES_TYPE_ENUM.SLACK.ordinal()))
-                + "))"
-                + ((filteringDataSourceObjId() > 0) ? " AND data_source_obj_id = " + this.filteringDataSourceObjId() : " ")
-                + (hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")") : "");
-    }
-
-    private void removeListeners() {
-        deleteObservers();
-        IngestManager.getInstance().removeIngestJobEventListener(pcl);
-        refreshThrottler.unregisterEventListener();
-        Case.removeEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
-    }
-
-    /**
-     * Performs the query on the database to get all distinct MIME types of
-     * files in it, and populate the hashmap with those results.
-     */
-    private void populateHashMap() {
-        String query = "SELECT mime_type, count(*) AS count FROM tsk_files "
-                + " WHERE mime_type IS NOT null "
-                + " AND " + createBaseWhereExpr()
-                + " GROUP BY mime_type";
-        synchronized (existingMimeTypeCounts) {
-            existingMimeTypeCounts.clear();
-            try 
-                (SleuthkitCase.CaseDbQuery dbQuery = Case.getCurrentCaseThrows().getSleuthkitCase().executeQuery(query)) {
-                ResultSet resultSet = dbQuery.getResultSet();
-                while (resultSet.next()) {
-                    final String mime_type = resultSet.getString("mime_type"); //NON-NLS
-                    if (!mime_type.isEmpty()) {
-                        //if the mime_type contained multiple slashes then everything after the first slash will become the subtype
-                        final String mediaType = StringUtils.substringBefore(mime_type, "/");
-                        final String subType = StringUtils.removeStart(mime_type, mediaType + "/");
-                        if (!mediaType.isEmpty() && !subType.isEmpty()) {
-                            final long count = resultSet.getLong("count");
-                            existingMimeTypeCounts.computeIfAbsent(mediaType, t -> new HashMap<>())
-                                    .put(subType, count);
-                        }
-                    }
-                }
-            } catch (NoCurrentCaseException | TskCoreException | SQLException ex) {
-                logger.log(Level.SEVERE, "Unable to populate File Types by MIME Type tree view from DB: ", ex); //NON-NLS
-            }
-        }
-
-        setChanged();
-        notifyObservers();
-    }
-
     FileTypesByMimeType(FileTypes typesRoot) {
         this.typesRoot = typesRoot;
-        this.pcl = (PropertyChangeEvent evt) -> {
-            String eventType = evt.getPropertyName();
-            if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
-                    || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())
-                    || eventType.equals(Case.Events.DATA_SOURCE_ADDED.toString())) {
-
-                refreshMimeTypes();
-            } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
-                if (evt.getNewValue() == null) {
-                    removeListeners();
-                }
-            }
-        };
-        refreshThrottler = new RefreshThrottler(new FileTypesByMimeTypeRefresher());
-        IngestManager.getInstance().addIngestJobEventListener(INGEST_JOB_EVENTS_OF_INTEREST, pcl);
-        refreshThrottler.registerForIngestModuleEvents();
-        Case.addEventTypeSubscriber(CASE_EVENTS_OF_INTEREST, pcl);
-        populateHashMap();
     }
 
     @Override
@@ -187,149 +56,33 @@ long filteringDataSourceObjId() {
         return typesRoot.filteringDataSourceObjId();
     }
 
-    /**
-     * Method to check if the node in question is a ByMimeTypeNode which is
-     * empty.
-     *
-     * @param node the Node which you wish to check.
-     *
-     * @return True if originNode is an instance of ByMimeTypeNode and is empty,
-     * false otherwise.
-     */
-    public static boolean isEmptyMimeTypeNode(Node node) {
-        boolean isEmptyMimeNode = false;
-        if (node instanceof FileTypesByMimeType.ByMimeTypeNode && ((FileTypesByMimeType.ByMimeTypeNode) node).isEmpty()) {
-            isEmptyMimeNode = true;
-        }
-        return isEmptyMimeNode;
-
-    }
-
-    private void refreshMimeTypes() {
-        /**
-         * Checking for a current case is a stop gap measure until a different
-         * way of handling the closing of cases is worked out. Currently, remote
-         * events may be received for a case that is already closed.
-         */
-        try {
-            Case.getCurrentCaseThrows();
-            typesRoot.updateShowCounts();
-            populateHashMap();
-        } catch (NoCurrentCaseException notUsed) {
-            /**
-             * Case is closed, do nothing.
-             */
-        }
-    }
-
-    /**
-     * Responsible for updating the 'By Mime Type' view in the UI. See
-     * RefreshThrottler for more details.
-     */
-    private class FileTypesByMimeTypeRefresher implements RefreshThrottler.Refresher {
-
-        @Override
-        public void refresh() {
-            refreshMimeTypes();
-        }
-
-        @Override
-        public boolean isRefreshRequired(PropertyChangeEvent evt) {
-            return true;
-        }
-
-    }
-
     /**
      * Class which represents the root node of the "By MIME Type" tree, will
      * have children of each media type present in the database or no children
      * when the file detection module has not been run and MIME type is
      * currently unknown.
      */
-    class ByMimeTypeNode extends DisplayableItemNode {
+    public static class ByMimeTypeNode extends DisplayableItemNode {
 
         @NbBundle.Messages({"FileTypesByMimeType.name.text=By MIME Type"})
 
         final String NAME = Bundle.FileTypesByMimeType_name_text();
+        
+        private final long dataSourceId;
 
-        ByMimeTypeNode() {
-            super(Children.create(new ByMimeTypeNodeChildren(), true), Lookups.singleton(Bundle.FileTypesByMimeType_name_text()));
+        ByMimeTypeNode(long dataSourceId) {
+            super(Children.create(new FileMimePrefixFactory(
+                    dataSourceId > 0
+                    ? dataSourceId
+                    : null), true), Lookups.singleton(Bundle.FileTypesByMimeType_name_text()));
             super.setName(NAME);
             super.setDisplayName(NAME);
             this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file_types.png");
+            this.dataSourceId = dataSourceId;
         }
-
-        @Override
-        public boolean isLeafTypeNode() {
-            return false;
-        }
-
-        @Override
-        public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
-            return visitor.visit(this);
-        }
-
-        @Override
-        public String getItemType() {
-            return getClass().getName();
-        }
-
-        boolean isEmpty() {
-            synchronized (existingMimeTypeCounts) {
-                return existingMimeTypeCounts.isEmpty();
-            }
-        }
-    }
-
-    /**
-     * Creates the children for the "By MIME Type" node these children will each
-     * represent a distinct media type present in the DB
-     */
-    private class ByMimeTypeNodeChildren extends ChildFactory<String> implements Observer {
-
-        private ByMimeTypeNodeChildren() {
-            super();
-            addObserver(this);
-        }
-
-        @Override
-        protected boolean createKeys(List<String> mediaTypeNodes) {
-            final List<String> keylist;
-            synchronized (existingMimeTypeCounts) {
-                keylist = new ArrayList<>(existingMimeTypeCounts.keySet());
-            }
-            Collections.sort(keylist);
-            mediaTypeNodes.addAll(keylist);
-
-            return true;
-        }
-
-        @Override
-        protected Node createNodeForKey(String key) {
-            return new MediaTypeNode(key);
-        }
-
-        @Override
-        public void update(Observable o, Object arg) {
-            refresh(true);
-        }
-    }
-
-    /**
-     * The Media type node created by the ByMimeTypeNodeChildren and contains
-     * one of the unique media types present in the database for this case.
-     */
-    class MediaTypeNode extends DisplayableItemNode {
-
-        @NbBundle.Messages({"FileTypesByMimeTypeNode.createSheet.mediaType.name=Type",
-            "FileTypesByMimeTypeNode.createSheet.mediaType.displayName=Type",
-            "FileTypesByMimeTypeNode.createSheet.mediaType.desc=no description"})
-
-        MediaTypeNode(String name) {
-            super(Children.create(new MediaTypeNodeChildren(name), true), Lookups.singleton(name));
-            setName(name);
-            setDisplayName(name);
-            this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file_types.png");
+        
+        public Node clone() {
+            return new ByMimeTypeNode(dataSourceId);
         }
 
         @Override
@@ -342,134 +95,13 @@ public <T> T accept(DisplayableItemNodeVisitor<T> visitor) {
             return visitor.visit(this);
         }
 
-        @Override
-        protected Sheet createSheet() {
-            Sheet sheet = super.createSheet();
-            Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
-            if (sheetSet == null) {
-                sheetSet = Sheet.createPropertiesSet();
-                sheet.put(sheetSet);
-            }
-            sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaType.name"), NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaType.displayName"), NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaType.desc"), getDisplayName()));
-            return sheet;
-        }
-
-        @Override
-        public String getItemType() {
-            return getClass().getName();
-        }
-
-    }
-
-    /**
-     * Creates children for media type nodes, children will be MediaSubTypeNodes
-     * and represent one of the subtypes which are present in the database of
-     * their media type.
-     */
-    private class MediaTypeNodeChildren extends ChildFactory<String> implements Observer {
-
-        String mediaType;
-
-        MediaTypeNodeChildren(String name) {
-            addObserver(this);
-            this.mediaType = name;
-        }
-
-        @Override
-        protected boolean createKeys(List<String> mediaTypeNodes) {
-            mediaTypeNodes.addAll(existingMimeTypeCounts.get(mediaType).keySet());
-            return true;
-        }
-
-        @Override
-        protected Node createNodeForKey(String subtype) {
-            String mimeType = mediaType + "/" + subtype;
-            return new MediaSubTypeNode(mimeType);
-        }
-
-        @Override
-        public void update(Observable o, Object arg) {
-            refresh(true);
-        }
-
-    }
-
-    /**
-     * Node which represents the media sub type in the By MIME type tree, the
-     * media subtype is the portion of the MIME type following the /.
-     */
-    final class MediaSubTypeNode extends FileTypes.BGCountUpdatingNode implements SelectionResponder {
-
-        @NbBundle.Messages({"FileTypesByMimeTypeNode.createSheet.mediaSubtype.name=Subtype",
-            "FileTypesByMimeTypeNode.createSheet.mediaSubtype.displayName=Subtype",
-            "FileTypesByMimeTypeNode.createSheet.mediaSubtype.desc=no description"})
-        private final String mimeType;
-        private final String subType;
-
-        private MediaSubTypeNode(String mimeType) {
-            super(typesRoot, Children.LEAF, Lookups.fixed(mimeType));
-            this.mimeType = mimeType;
-            this.subType = StringUtils.substringAfter(mimeType, "/");
-            super.setName(mimeType);
-            super.setDisplayName(subType);
-            updateDisplayName();
-            this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/file-filter-icon.png"); //NON-NLS
-            addObserver(this);
-        }
-        
-        @Override
-        public void respondSelection(DataResultTopComponent dataResultPanel) {
-            dataResultPanel.displayFileMimes(new FileTypeMimeSearchParams(
-                    mimeType,
-                    filteringDataSourceObjId() > 0 ? filteringDataSourceObjId() : null));
-        }
-
-        /**
-         * This returns true because any MediaSubTypeNode that exists is going
-         * to be a bottom level node in the Tree view on the left of Autopsy.
-         *
-         * @return true
-         */
-        @Override
-        public boolean isLeafTypeNode() {
-            return true;
-        }
-
-        @Override
-        public <T> T accept(DisplayableItemNodeVisitor< T> visitor) {
-            return visitor.visit(this);
-        }
-
-        @Override
-        protected Sheet createSheet() {
-            Sheet sheet = super.createSheet();
-            Sheet.Set sheetSet = sheet.get(Sheet.PROPERTIES);
-            if (sheetSet == null) {
-                sheetSet = Sheet.createPropertiesSet();
-                sheet.put(sheetSet);
-            }
-            sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaSubtype.name"), NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaSubtype.displayName"), NbBundle.getMessage(this.getClass(), "FileTypesByMimeTypeNode.createSheet.mediaSubtype.desc"), getDisplayName()));
-            return sheet;
-        }
-
         @Override
         public String getItemType() {
             return getClass().getName();
         }
 
-        @Override
-        public void update(Observable o, Object arg) {
-            updateDisplayName();
-        }
-
-        @Override
-        String getDisplayNameBase() {
-            return subType;
-        }
-
-        @Override
-        long calculateChildCount() {
-            return existingMimeTypeCounts.get(StringUtils.substringBefore(mimeType, "/")).get(subType);
+        boolean isEmpty() {
+            return this.getChildren().getNodesCount(true) <= 0;
         }
     }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java b/Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java
index 28c02d6b1a26d1bbfa94972d99e5c270fecd0ec7..743cab10d6a0dd41fd2a9f07674c91899695507e 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/HashsetHits.java
@@ -337,7 +337,7 @@ public HashsetNameNode(String hashSetName) {
         
         @Override
         public void respondSelection(DataResultTopComponent dataResultPanel) {
-            dataResultPanel.displayHashHits(new HashHitSearchParam(
+            dataResultPanel.displayAnalysisResultSet(new HashHitSearchParam(
                     filteringDSObjId > 0 ? filteringDSObjId : null,
                     hashSetName));
         }
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java b/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java
index 694d583cdf85d476f5d181cd84c2871710da36fe..54428a38862525d346d9f862da6c613bf06d9bd7 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/RootContentChildren.java
@@ -24,6 +24,8 @@
 import org.openide.nodes.Children;
 import org.openide.nodes.Node;
 import org.openide.util.NbBundle;
+import org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode;
+import org.sleuthkit.autopsy.datamodel.FileTypesByMimeType.ByMimeTypeNode;
 import org.sleuthkit.autopsy.datamodel.accounts.Accounts;
 import org.sleuthkit.datamodel.SleuthkitVisitableItem;
 
@@ -85,7 +87,7 @@ static class CreateAutopsyNodeVisitor extends AutopsyItemVisitor.Default<Abstrac
 
         @Override
         public AbstractNode visit(FileTypesByExtension sf) {
-            return sf.new FileTypesByExtNode(sf.getSleuthkitCase(), null);
+            return new FileTypesByExtNode(sf.filteringDataSourceObjId());
         }
 
         @Override
@@ -100,7 +102,7 @@ public AbstractNode visit(DeletedContent dc) {
 
         @Override
         public AbstractNode visit(FileSize dc) {
-            return new FileSize.FileSizeRootNode(dc.getSleuthkitCase(), dc.filteringDataSourceObjId());
+            return new FileSize.FileSizeRootNode(dc.filteringDataSourceObjId());
         }
 
         @Override
@@ -172,7 +174,7 @@ protected AbstractNode defaultVisit(AutopsyVisitableItem di) {
 
         @Override
         public AbstractNode visit(FileTypesByMimeType ftByMimeTypeItem) {
-            return ftByMimeTypeItem.new ByMimeTypeNode();
+            return new ByMimeTypeNode(ftByMimeTypeItem.filteringDataSourceObjId());
         }
 
         @Override
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/ViewsNode.java b/Core/src/org/sleuthkit/autopsy/datamodel/ViewsNode.java
index 423e58e1ed94ce3f93228f96dfce4144079a5f0d..09576f53127b51f1ae05fcc35acbd4e2c1ae9687 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/ViewsNode.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/ViewsNode.java
@@ -46,7 +46,7 @@ public ViewsNode(SleuthkitCase sleuthkitCase, long dsObjId) {
                     // add it back in if we can filter the results to a more managable size. 
                     // new RecentFiles(sleuthkitCase),
                     new DeletedContent(sleuthkitCase, dsObjId),
-                    new FileSize(sleuthkitCase, dsObjId))
+                    new FileSize(dsObjId))
                 ),
                 Lookups.singleton(NAME)
             );
diff --git a/Core/src/org/sleuthkit/autopsy/datamodel/accounts/Accounts.java b/Core/src/org/sleuthkit/autopsy/datamodel/accounts/Accounts.java
index beac329f52fe6ef5db29a8ddba4635ac69b46c91..28954023371df2beb4d431ff8b18e36ced1b2a26 100644
--- a/Core/src/org/sleuthkit/autopsy/datamodel/accounts/Accounts.java
+++ b/Core/src/org/sleuthkit/autopsy/datamodel/accounts/Accounts.java
@@ -267,30 +267,32 @@ public String getItemType() {
 
         @Override
         protected long fetchChildCount(SleuthkitCase skCase) throws TskCoreException {
-            long count = 0;
-            String dataSourceFilterClause = (filteringDSObjId > 0)
-                    ? " AND " + filteringDSObjId + " IN (SELECT art.data_source_obj_id FROM blackboard_artifacts art WHERE art.artifact_id = attr.artifact_id)"
-                    : "";
-
             String accountTypesInUseQuery
-                    = "SELECT COUNT(attr.value_text) AS count"
-                    + " FROM blackboard_attributes attr"
-                    + " WHERE attr.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE.getTypeID()
-                    + " AND attr.artifact_type_id = " + BlackboardArtifact.Type.TSK_ACCOUNT.getTypeID()
-                    + dataSourceFilterClause
-                    + " GROUP BY attr.value_text";
+                    = "SELECT COUNT(*) AS count\n"
+                    + "FROM (\n"
+                    + "  SELECT MIN(blackboard_attributes.value_text) AS account_type\n"
+                    + "  FROM blackboard_artifacts\n"
+                    + "  LEFT JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id\n"
+                    + "  WHERE blackboard_artifacts.artifact_type_id = " + TSK_ACCOUNT.getTypeID() + "\n"
+                    + "  AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.Type.TSK_ACCOUNT_TYPE.getTypeID() + "\n"
+                    + "  AND blackboard_attributes.value_text IS NOT NULL\n"
+                    + getFilterByDataSourceClause() + "\n"
+                    + "  -- group by artifact_id to ensure only one account type per artifact\n"
+                    + "  GROUP BY blackboard_artifacts.artifact_id\n"
+                    + ") res\n";
 
             try (SleuthkitCase.CaseDbQuery executeQuery = skCase.executeQuery(accountTypesInUseQuery);
                     ResultSet resultSet = executeQuery.getResultSet()) {
 
                 if (resultSet.next()) {
-                    count = resultSet.getLong("count");
+                    return resultSet.getLong("count");
                 }
 
             } catch (TskCoreException | SQLException ex) {
                 LOGGER.log(Level.SEVERE, "Error querying for count of all account types", ex);
             }
-            return count;
+
+            return 0;
         }
 
     }
@@ -334,13 +336,18 @@ List<String> getTypes() {
          */
         private void update() {
             String accountTypesInUseQuery
-                    = "SELECT blackboard_attributes.value_text as account_type, COUNT(*) as count "
-                    + " FROM blackboard_artifacts " //NON-NLS
-                    + "      JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id " //NON-NLS
-                    + " WHERE blackboard_artifacts.artifact_type_id = " + TSK_ACCOUNT.getTypeID() //NON-NLS
-                    + " AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.ATTRIBUTE_TYPE.TSK_ACCOUNT_TYPE.getTypeID() //NON-NLS
-                    + getFilterByDataSourceClause()
-                    + " GROUP BY blackboard_attributes.value_text ";
+                    = "SELECT res.account_type, COUNT(*) AS count\n"
+                    + "FROM (\n"
+                    + "  SELECT MIN(blackboard_attributes.value_text) AS account_type\n"
+                    + "  FROM blackboard_artifacts\n"
+                    + "  LEFT JOIN blackboard_attributes ON blackboard_artifacts.artifact_id = blackboard_attributes.artifact_id\n"
+                    + "  WHERE blackboard_artifacts.artifact_type_id = " + TSK_ACCOUNT.getTypeID() + "\n"
+                    + "  AND blackboard_attributes.attribute_type_id = " + BlackboardAttribute.Type.TSK_ACCOUNT_TYPE.getTypeID() + "\n"
+                    + getFilterByDataSourceClause() + "\n"
+                    + "  -- group by artifact_id to ensure only one account type per artifact\n"
+                    + "  GROUP BY blackboard_artifacts.artifact_id\n"
+                    + ") res\n"
+                    + "GROUP BY res.account_type";
 
             try (SleuthkitCase.CaseDbQuery executeQuery = skCase.executeQuery(accountTypesInUseQuery);
                     ResultSet resultSet = executeQuery.getResultSet()) {
@@ -404,6 +411,7 @@ public void propertyChange(PropertyChangeEvent evt) {
                      */
                     try {
                         Case.getCurrentCaseThrows();
+                        accountTypeResults.update();
                         refresh(true);
                     } catch (NoCurrentCaseException notUsed) {
                         // Case is closed, do nothing.
@@ -1466,7 +1474,7 @@ protected Sheet createSheet() {
         @Override
         public Action[] getActions(boolean context) {
             Action[] actions = super.getActions(context);
-            ArrayList<Action> arrayList = new ArrayList<>();           
+            ArrayList<Action> arrayList = new ArrayList<>();
             try {
                 arrayList.addAll(DataModelActionsFactory.getActions(Accounts.this.skCase.getContentById(fileKey.getObjID()), false));
             } catch (TskCoreException ex) {
diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java
index 76a27e0134e5bb8487d4a9b67c9df1bc132fcc75..19ebb3ea5a6e7b0b86d46dd12c1b0f72dfdbc88e 100644
--- a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java
+++ b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeFilterChildren.java
@@ -25,7 +25,6 @@
 import org.sleuthkit.autopsy.datamodel.DirectoryNode;
 import org.openide.nodes.FilterNode;
 import org.openide.nodes.Node;
-import org.openide.util.Lookup;
 import org.sleuthkit.autopsy.datamodel.AbstractAbstractFileNode;
 import org.sleuthkit.autopsy.datamodel.AbstractContentNode;
 import org.sleuthkit.autopsy.datamodel.AnalysisResults;
@@ -34,14 +33,16 @@
 import org.sleuthkit.autopsy.datamodel.DisplayableItemNode;
 import org.sleuthkit.autopsy.datamodel.DisplayableItemNodeVisitor;
 import org.sleuthkit.autopsy.datamodel.FileNode;
+import org.sleuthkit.autopsy.datamodel.FileSize;
 import org.sleuthkit.autopsy.datamodel.FileTypes.FileTypesNode;
+import org.sleuthkit.autopsy.datamodel.FileTypesByExtension.FileTypesByExtNode;
+import org.sleuthkit.autopsy.datamodel.FileTypesByMimeType;
 import org.sleuthkit.autopsy.datamodel.LayoutFileNode;
 import org.sleuthkit.autopsy.datamodel.LocalFileNode;
 import org.sleuthkit.autopsy.datamodel.LocalDirectoryNode;
 import org.sleuthkit.autopsy.datamodel.SlackFileNode;
 import org.sleuthkit.autopsy.datamodel.VirtualDirectoryNode;
 import org.sleuthkit.autopsy.datamodel.VolumeNode;
-import org.sleuthkit.autopsy.mainui.nodes.TreeNode;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.BlackboardArtifact.ARTIFACT_TYPE;
 import org.sleuthkit.datamodel.Content;
@@ -93,6 +94,15 @@ protected Node[] createNodes(Node origNode) {
         } else if (origNode instanceof AnalysisResults.RootNode) {
             Node cloned = ((AnalysisResults.RootNode) origNode).clone();
             return new Node[]{cloned};
+        } else if (origNode instanceof FileTypesByExtNode) {
+            Node cloned = ((FileTypesByExtNode) origNode).clone();
+            return new Node[]{cloned};
+        } else if (origNode instanceof FileTypesByMimeType.ByMimeTypeNode) {
+            Node cloned = ((FileTypesByMimeType.ByMimeTypeNode) origNode).clone();
+            return new Node[]{cloned};
+        } else if (origNode instanceof FileSize.FileSizeRootNode) {
+            Node cloned = ((FileSize.FileSizeRootNode) origNode).clone();
+            return new Node[]{cloned};
         } else if (origNode == null || !(origNode instanceof DisplayableItemNode)) {
             return new Node[]{};
         }
diff --git a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java
index ca2583dd9b5e76af2d106f916c17dd6179e71926..2504e7c049651b544005c2aed79b16231198e22b 100644
--- a/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java
+++ b/Core/src/org/sleuthkit/autopsy/directorytree/DirectoryTreeTopComponent.java
@@ -878,7 +878,8 @@ void respondSelection(final Node[] oldNodes, final Node[] newNodes) {
                     Node drfn = new DataResultFilterNode(originNode, DirectoryTreeTopComponent.this.em);
                     if (originNode instanceof SelectionResponder) {
                         ((SelectionResponder) originNode).respondSelection(dataResult);
-                    } else if (FileTypesByMimeType.isEmptyMimeTypeNode(originNode)) {
+        } else if (originNode instanceof FileTypesByMimeType.ByMimeTypeNode &&
+                            originNode.getChildren().getNodesCount(true) <= 0) {
                         //Special case for when File Type Identification has not yet been run and
                         //there are no mime types to populate Files by Mime Type Tree
                         EmptyNode emptyNode = new EmptyNode(Bundle.DirectoryTreeTopComponent_emptyMimeNode_text());
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java
index 91bbb8a60eb54f437d388d400feb71705fb39b70..aea8e47e47546823be08f46e9541cfd064d6f46d 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/AnalysisResultDAO.java
@@ -30,10 +30,8 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
-import java.util.function.BiFunction;
 import java.util.logging.Level;
 import java.util.stream.Collectors;
-import org.apache.commons.lang3.StringUtils;
 import org.openide.util.NbBundle;
 import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
@@ -46,6 +44,7 @@
 import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.HostAddress;
 import org.sleuthkit.datamodel.Image;
@@ -129,7 +128,7 @@ public static Set<BlackboardArtifact.Type> getIgnoredTreeTypes() {
 
     // TODO We can probably combine all the caches at some point
     private final Cache<SearchParams<BlackboardArtifactSearchParam>, AnalysisResultTableSearchResultsDTO> analysisResultCache = CacheBuilder.newBuilder().maximumSize(1000).build();
-    private final Cache<SearchParams<HashHitSearchParam>, AnalysisResultTableSearchResultsDTO> hashHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
+    private final Cache<SearchParams<AnalysisResultSetSearchParam>, AnalysisResultTableSearchResultsDTO> setHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
     private final Cache<SearchParams<KeywordHitSearchParam>, AnalysisResultTableSearchResultsDTO> keywordHitCache = CacheBuilder.newBuilder().maximumSize(1000).build();
 
     private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchParams<BlackboardArtifactSearchParam> cacheKey) throws NoCurrentCaseException, TskCoreException {
@@ -137,15 +136,15 @@ private AnalysisResultTableSearchResultsDTO fetchAnalysisResultsForTable(SearchP
         SleuthkitCase skCase = getCase();
         Blackboard blackboard = skCase.getBlackboard();
         BlackboardArtifact.Type artType = cacheKey.getParamData().getArtifactType();
-        
+
         List<BlackboardArtifact> arts = new ArrayList<>();
         String pagedWhereClause = getWhereClause(cacheKey);
         arts.addAll(blackboard.getAnalysisResultsWhere(pagedWhereClause));
         blackboard.loadBlackboardAttributes(arts);
-        
+
         // Get total number of results
-        long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());  
-        
+        long totalResultsCount = getTotalResultsCount(cacheKey, arts.size());
+
         TableData tableData = createTableData(artType, arts);
         return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), totalResultsCount);
     }
@@ -163,23 +162,26 @@ private AnalysisResultTableSearchResultsDTO fetchSetNameHitsForTable(SearchParam
         if (dataSourceId != null) {
             originalWhereClause += " AND artifacts.data_source_obj_id = " + dataSourceId + " ";
         }
-        
+
+        String expectedSetName = cacheKey.getParamData().getSetName();
+
         List<BlackboardArtifact> allHashHits = new ArrayList<>();
         allHashHits.addAll(blackboard.getAnalysisResultsWhere(originalWhereClause));
         blackboard.loadBlackboardAttributes(allHashHits);
-        
+
         // Filter for the selected set
-        List<BlackboardArtifact> hashHits = new ArrayList<>();
+        List<BlackboardArtifact> arts = new ArrayList<>();
         for (BlackboardArtifact art : allHashHits) {
             BlackboardAttribute setNameAttr = art.getAttribute(BlackboardAttribute.Type.TSK_SET_NAME);
-            if ((setNameAttr != null) && cacheKey.getParamData().getSetName().equals(setNameAttr.getValueString())) {
-                hashHits.add(art);
+            if ((expectedSetName == null && setNameAttr == null)
+                    || (expectedSetName != null && setNameAttr != null && expectedSetName.equals(setNameAttr.getValueString()))) {
+                arts.add(art);
             }
         }
 
-        List<BlackboardArtifact> pagedArtifacts = getPaged(hashHits, cacheKey);
+        List<BlackboardArtifact> pagedArtifacts = getPaged(arts, cacheKey);
         TableData tableData = createTableData(artType, pagedArtifacts);
-        return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), hashHits.size());
+        return new AnalysisResultTableSearchResultsDTO(artType, tableData.columnKeys, tableData.rows, cacheKey.getStartItem(), arts.size());
     }
 
     @Override
@@ -269,19 +271,19 @@ public boolean isAnalysisResultsInvalidating(AnalysisResultSearchParam key, Modu
         return key.getArtifactType().equals(eventData.getBlackboardArtifactType());
     }
 
-    public AnalysisResultTableSearchResultsDTO getHashHitsForTable(HashHitSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
+    public AnalysisResultTableSearchResultsDTO getAnalysisResultSetHits(AnalysisResultSetSearchParam artifactKey, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
         if (artifactKey.getDataSourceId() != null && artifactKey.getDataSourceId() < 0) {
             throw new IllegalArgumentException(MessageFormat.format("Illegal data.  "
                     + "Data source id must be null or > 0.  "
                     + "Received data source id: {0}", artifactKey.getDataSourceId() == null ? "<null>" : artifactKey.getDataSourceId()));
         }
 
-        SearchParams<HashHitSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
+        SearchParams<AnalysisResultSetSearchParam> searchParams = new SearchParams<>(artifactKey, startItem, maxCount);
         if (hardRefresh) {
-            hashHitCache.invalidate(searchParams);
+            setHitCache.invalidate(searchParams);
         }
 
-        return hashHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams));
+        return setHitCache.get(searchParams, () -> fetchSetNameHitsForTable(searchParams));
     }
 
     // TODO - JIRA-8117
@@ -306,7 +308,7 @@ public void dropAnalysisResultCache() {
     }
 
     public void dropHashHitCache() {
-        hashHitCache.invalidateAll();
+        setHitCache.invalidateAll();
     }
 
     public void dropKeywordHitCache() {
@@ -348,110 +350,338 @@ public TreeResultsDTO<AnalysisResultSearchParam> getAnalysisResultCounts(Long da
         }
     }
 
-// GVDTODO code to use in a future PR
-//    /**
-//     *
-//     * @param type         The artifact type to filter on.
-//     * @param setNameAttr  The blackboard attribute denoting the set name.
-//     * @param dataSourceId The data source object id for which the results
-//     *                     should be filtered or null if no data source
-//     *                     filtering.
-//     *
-//     * @return A mapping of set names to their counts.
-//     *
-//     * @throws IllegalArgumentException
-//     * @throws ExecutionException
-//     */
-//    Map<String, Long> getSetCountsMap(BlackboardArtifact.Type type, BlackboardAttribute.Type setNameAttr, Long dataSourceId) throws IllegalArgumentException, ExecutionException {
-//        if (dataSourceId != null && dataSourceId <= 0) {
-//            throw new IllegalArgumentException("Expected data source id to be > 0");
-//        }
-//
-//        try {
-//            // get artifact types and counts
-//            SleuthkitCase skCase = getCase();
-//            String query = " set_name, COUNT(*) AS count \n"
-//                    + "FROM ( \n"
-//                    + "  SELECT art.artifact_id, \n"
-//                    + "  (SELECT value_text \n"
-//                    + "    FROM blackboard_attributes attr \n"
-//                    + "    WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = " + setNameAttr.getTypeID() + " LIMIT 1) AS set_name \n"
-//                    + "	 FROM blackboard_artifacts art \n"
-//                    + "	 WHERE  art.artifact_type_id = " + type.getTypeID() + " \n"
-//                    + ((dataSourceId == null) ? "" : "  AND art.data_source_obj_id = " + dataSourceId + " \n")
-//                    + ") \n"
-//                    + "GROUP BY set_name";
-//
-//            Map<String, Long> setCounts = new HashMap<>();
-//            skCase.getCaseDbAccessManager().select(query, (resultSet) -> {
-//                try {
-//                    while (resultSet.next()) {
-//                        String setName = resultSet.getString("set_name");
-//                        long count = resultSet.getLong("count");
-//                        setCounts.put(setName, count);
-//                    }
-//                } catch (SQLException ex) {
-//                    logger.log(Level.WARNING, "An error occurred while fetching set name counts.", ex);
-//                }
-//            });
-//
-//            return setCounts;
-//        } catch (NoCurrentCaseException | TskCoreException ex) {
-//            throw new ExecutionException("An error occurred while fetching set counts", ex);
-//        }
-//    }
-//
-//    /**
-//     * Get counts for individual sets of the provided type to be used in the
-//     * tree view.
-//     *
-//     * @param type         The blackboard artifact type.
-//     * @param dataSourceId The data source object id for which the results
-//     *                     should be filtered or null if no data source
-//     *                     filtering.
-//     * @param nullSetName  For artifacts with no set, this is the name to
-//     *                     provide. If null, artifacts without a set name will
-//     *                     be ignored.
-//     * @param converter    Means of converting from data source id and set name
-//     *                     to an AnalysisResultSetSearchParam
-//     *
-//     * @return The sets along with counts to display.
-//     *
-//     * @throws IllegalArgumentException
-//     * @throws ExecutionException
-//     */
-//    private <T extends AnalysisResultSetSearchParam> TreeResultsDTO<T> getSetCounts(
-//            BlackboardArtifact.Type type,
-//            Long dataSourceId,
-//            String nullSetName,
-//            BiFunction<Long, String, T> converter) throws IllegalArgumentException, ExecutionException {
-//
-//        List<TreeItemDTO<T>> allSets
-//                = getSetCountsMap(type, BlackboardAttribute.Type.TSK_SET_NAME, dataSourceId).entrySet().stream()
-//                        .filter(entry -> nullSetName != null || entry.getKey() != null)
-//                        .map(entry -> {
-//                            return new TreeItemDTO<>(
-//                                    type.getTypeName(),
-//                                    converter.apply(dataSourceId, entry.getKey()),
-//                                    entry.getKey(),
-//                                    entry.getKey() == null ? nullSetName : entry.getKey(),
-//                                    entry.getValue());
-//                        })
-//                        .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName()))
-//                        .collect(Collectors.toList());
-//
-//        return new TreeResultsDTO<>(allSets);
-//    }
-//
-//    public TreeResultsDTO<HashHitSearchParam> getHashHitSetCounts(Long dataSourceId) throws IllegalArgumentException, ExecutionException {
-//        return getSetCounts(BlackboardArtifact.Type.TSK_HASHSET_HIT, dataSourceId, null, (dsId, setName) -> new HashHitSearchParam(dsId, setName));
-//    }
-//
-//    public TreeResultsDTO<AnalysisResultSetSearchParam> getSetCounts(BlackboardArtifact.Type type, Long dataSourceId, String nullSetName) throws IllegalArgumentException, ExecutionException {
-//        return getSetCounts(type, dataSourceId, nullSetName, (dsId, setName) -> new AnalysisResultSetSearchParam(type, dsId, setName));
-//    }
-
-    
+    /**
+     * Gets a mapping of set names to artifact counts for the given artifact type.
+     * @param type         The artifact type to filter on.
+     * @param setNameAttr  The blackboard attribute denoting the set name.
+     * @param dataSourceId The data source object id for which the results
+     *                     should be filtered or null if no data source
+     *                     filtering.
+     *
+     * @return A mapping of set names to their counts.
+     *
+     * @throws IllegalArgumentException
+     * @throws ExecutionException
+     */
+    Map<String, Long> getSetCountsMap(BlackboardArtifact.Type type, BlackboardAttribute.Type setNameAttr, Long dataSourceId) throws IllegalArgumentException, ExecutionException {
+        if (dataSourceId != null && dataSourceId <= 0) {
+            throw new IllegalArgumentException("Expected data source id to be > 0");
+        }
+
+        try {
+            // get artifact types and counts
+            SleuthkitCase skCase = getCase();
+            String query = " res.set_name, COUNT(*) AS count \n"
+                    + "FROM ( \n"
+                    + "  SELECT art.artifact_id, \n"
+                    + "  (SELECT value_text \n"
+                    + "    FROM blackboard_attributes attr \n"
+                    + "    WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = " + setNameAttr.getTypeID() + " LIMIT 1) AS set_name \n"
+                    + "	 FROM blackboard_artifacts art \n"
+                    + "	 WHERE  art.artifact_type_id = " + type.getTypeID() + " \n"
+                    + ((dataSourceId == null) ? "" : "  AND art.data_source_obj_id = " + dataSourceId + " \n")
+                    + ") res \n"
+                    + "GROUP BY res.set_name";
+
+            Map<String, Long> setCounts = new HashMap<>();
+            skCase.getCaseDbAccessManager().select(query, (resultSet) -> {
+                try {
+                    while (resultSet.next()) {
+                        String setName = resultSet.getString("set_name");
+                        long count = resultSet.getLong("count");
+                        setCounts.put(setName, count);
+                    }
+                } catch (SQLException ex) {
+                    logger.log(Level.WARNING, "An error occurred while fetching set name counts.", ex);
+                }
+            });
+
+            return setCounts;
+        } catch (NoCurrentCaseException | TskCoreException ex) {
+            throw new ExecutionException("An error occurred while fetching set counts", ex);
+        }
+    }
+
+    /**
+     * Get counts for individual sets of the provided type to be used in the
+     * tree view.
+     *
+     * @param type         The blackboard artifact type.
+     * @param dataSourceId The data source object id for which the results
+     *                     should be filtered or null if no data source
+     *                     filtering.
+     * @param nullSetName  For artifacts with no set, this is the name to
+     *                     provide. If null, artifacts without a set name will
+     *                     be ignored.
+     * Results are sorted case-insensitively by set name, with a null set
+     * name (when nullSetName is provided) ordered first.
+     *
+     * @return The sets along with counts to display.
+     *
+     * @throws IllegalArgumentException
+     * @throws ExecutionException
+     */
+    public TreeResultsDTO<AnalysisResultSetSearchParam> getSetCounts(
+            BlackboardArtifact.Type type,
+            Long dataSourceId,
+            String nullSetName) throws IllegalArgumentException, ExecutionException {
+
+        List<TreeItemDTO<AnalysisResultSetSearchParam>> allSets
+                = getSetCountsMap(type, BlackboardAttribute.Type.TSK_SET_NAME, dataSourceId).entrySet().stream()
+                        .filter(entry -> nullSetName != null || entry.getKey() != null)
+                        .sorted((a, b) -> compareSetStrings(a.getKey(), b.getKey()))
+                        .map(entry -> {
+                            return new TreeItemDTO<>(
+                                    type.getTypeName(),
+                                    new AnalysisResultSetSearchParam(type, dataSourceId, entry.getKey()),
+                                    entry.getKey() == null ? 0 : entry.getKey(),
+                                    entry.getKey() == null ? nullSetName : entry.getKey(),
+                                    entry.getValue());
+                        })
+                        .collect(Collectors.toList());
+
+        return new TreeResultsDTO<>(allSets);
+    }
+
+    /**
+     * Compares set strings to properly order for the tree.
+     *
+     * @param a The first string.
+     * @param b The second string.
+     *
+     * @return The comparator result.
+     */
+    private int compareSetStrings(String a, String b) {
+        if (a == null && b == null) {
+            return 0;
+        } else if (a == null) {
+            return -1;
+        } else if (b == null) {
+            return 1;
+        } else {
+            return a.compareToIgnoreCase(b);
+        }
+    }
+
+    /**
+     * Returns the search term counts for a set name of keyword search results.
+     *
+     * @param setName      The set name.
+     * @param dataSourceId The data source id or null.
+     *
+     * @return The search terms and counts.
+     *
+     * @throws IllegalArgumentException
+     * @throws ExecutionException
+     */
+    @Messages({
+        "# {0} - searchTerm",
+        "AnalysisResultDAO_getKeywordSearchTermCounts_exactMatch={0} (Exact)",
+        "# {0} - searchTerm",
+        "AnalysisResultDAO_getKeywordSearchTermCounts_substringMatch={0} (Substring)",
+        "# {0} - searchTerm",
+        "AnalysisResultDAO_getKeywordSearchTermCounts_regexMatch={0} (Regex)",})
+    public TreeResultsDTO<? extends KeywordSearchTermParams> getKeywordSearchTermCounts(String setName, Long dataSourceId) throws IllegalArgumentException, ExecutionException {
+        if (dataSourceId != null && dataSourceId <= 0) {
+            throw new IllegalArgumentException("Expected data source id to be > 0");
+        }
+
+        String dataSourceClause = dataSourceId == null
+                ? ""
+                : "AND art.data_source_obj_id = ?\n";
+
+        String setNameClause = setName == null
+                ? "attr_res.set_name IS NULL"
+                : "attr_res.set_name = ?";
+
+        String query = "res.search_term,\n"
+                + "  res.search_type,\n"
+                + "  SUM(res.count) AS count,\n"
+                + "  -- when there are multiple keyword groupings, return true for has children\n"
+                + "  CASE\n"
+                + "    WHEN COUNT(*) > 1 THEN 1\n"
+                + "	ELSE 0\n"
+                + "  END AS has_children\n"
+                + "FROM (\n"
+                + "  -- get keyword value, search type, search term, and count grouped by (keyword, regex, search_type) "
+                + "  -- in order to determine if groupings have children\n"
+                + "  SELECT \n"
+                + "    attr_res.keyword, \n"
+                + "    attr_res.search_type,\n"
+                + "    COUNT(*) AS count,\n"
+                + "    CASE \n"
+                + "      WHEN attr_res.search_type = 0 OR attr_res.regexp_str IS NULL THEN \n"
+                + "        attr_res.keyword\n"
+                + "      ELSE \n"
+                + "        attr_res.regexp_str\n"
+                + "    END AS search_term\n"
+                + "  FROM (\n"
+                + "	-- get pertinent attribute values for artifacts\n"
+                + "    SELECT art.artifact_id, \n"
+                + "    (SELECT value_text FROM blackboard_attributes attr WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = "
+                + BlackboardAttribute.Type.TSK_SET_NAME.getTypeID() + " LIMIT 1) AS set_name,\n"
+                + "    (SELECT value_int32 FROM blackboard_attributes attr WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = "
+                + BlackboardAttribute.Type.TSK_KEYWORD_SEARCH_TYPE.getTypeID() + " LIMIT 1) AS search_type,\n"
+                + "    (SELECT value_text FROM blackboard_attributes attr WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = "
+                + BlackboardAttribute.Type.TSK_KEYWORD_REGEXP.getTypeID() + " LIMIT 1) AS regexp_str,\n"
+                + "    (SELECT value_text FROM blackboard_attributes attr WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = "
+                + BlackboardAttribute.Type.TSK_KEYWORD.getTypeID() + " LIMIT 1) AS keyword\n"
+                + "    FROM blackboard_artifacts art\n"
+                + "    WHERE  art.artifact_type_id = " + BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID() + "\n"
+                + dataSourceClause
+                + "  ) attr_res\n"
+                + "  WHERE " + setNameClause + "\n"
+                + "  GROUP BY attr_res.regexp_str, attr_res.keyword, attr_res.search_type\n"
+                + ") res\n"
+                + "GROUP BY res.search_term, res.search_type\n"
+                + "ORDER BY res.search_term, res.search_type";
+
+        // get artifact types and counts
+        try (CaseDbPreparedStatement preparedStatement = getCase().getCaseDbAccessManager().prepareSelect(query)) {
+
+            int paramIdx = 0;
+            if (dataSourceId != null) {
+                preparedStatement.setLong(++paramIdx, dataSourceId);
+            }
+
+            if (setName != null) {
+                preparedStatement.setString(++paramIdx, setName);
+            }
+
+            List<TreeItemDTO<KeywordSearchTermParams>> items = new ArrayList<>();
+            getCase().getCaseDbAccessManager().select(preparedStatement, (resultSet) -> {
+                try {
+                    while (resultSet.next()) {
+                        String searchTerm = resultSet.getString("search_term");
+                        int searchType = resultSet.getInt("search_type");
+                        long count = resultSet.getLong("count");
+                        boolean hasChildren = resultSet.getBoolean("has_children");
+
+                        String searchTermModified;
+                        switch (searchType) {
+                            case 0:
+                                searchTermModified = Bundle.AnalysisResultDAO_getKeywordSearchTermCounts_exactMatch(searchTerm == null ? "" : searchTerm);
+                                break;
+                            case 1:
+                                searchTermModified = Bundle.AnalysisResultDAO_getKeywordSearchTermCounts_substringMatch(searchTerm == null ? "" : searchTerm);
+                                break;
+                            case 2:
+                                searchTermModified = Bundle.AnalysisResultDAO_getKeywordSearchTermCounts_regexMatch(searchTerm == null ? "" : searchTerm);
+                                break;
+                            default:
+                                logger.log(Level.WARNING, MessageFormat.format("Non-standard search type value: {0}.", searchType));
+                                searchTermModified = searchTerm;
+                                break;
+                        }
+
+                        items.add(new TreeItemDTO<>(
+                                "KEYWORD_SEARCH_TERMS",
+                                new KeywordSearchTermParams(setName, searchTerm, searchType, hasChildren, dataSourceId),
+                                searchTermModified,
+                                searchTermModified,
+                                count
+                        ));
+                    }
+                } catch (SQLException ex) {
+                    logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex);
+                }
+            });
+
+            return new TreeResultsDTO<>(items);
+
+        } catch (SQLException | NoCurrentCaseException | TskCoreException ex) {
+            throw new ExecutionException("An error occurred while fetching set counts", ex);
+        }
+    }
+
+    /**
+     * Get counts for string matches of a particular regex/substring search
+     * term.
+     *
+     * @param setName      The set name or null if no set name.
+     * @param regexStr     The regex string. Must be non-null.
+     * @param searchType   The value for the search type attribute.
+     * @param dataSourceId The data source id or null.
+     *
+     * @return The results
+     *
+     * @throws IllegalArgumentException
+     * @throws ExecutionException
+     */
+    public TreeResultsDTO<? extends KeywordMatchParams> getKeywordMatchCounts(String setName, String regexStr, int searchType, Long dataSourceId) throws IllegalArgumentException, ExecutionException {
+        if (dataSourceId != null && dataSourceId <= 0) {
+            throw new IllegalArgumentException("Expected data source id to be > 0");
+        }
+
+        String dataSourceClause = dataSourceId == null
+                ? ""
+                : "AND data_source_obj_id = ?\n";
+
+        String setNameClause = setName == null
+                ? "res.set_name IS NULL"
+                : "res.set_name = ?";
+
+        String query = "keyword, \n"
+                + "  COUNT(*) AS count \n"
+                + "FROM (\n"
+                + "  SELECT art.artifact_id, \n"
+                + "  (SELECT value_text FROM blackboard_attributes attr WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = "
+                + BlackboardAttribute.Type.TSK_SET_NAME.getTypeID() + " LIMIT 1) AS set_name,\n"
+                + "  (SELECT value_int32 FROM blackboard_attributes attr WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = "
+                + BlackboardAttribute.Type.TSK_KEYWORD_SEARCH_TYPE.getTypeID() + " LIMIT 1) AS search_type,\n"
+                + "  (SELECT value_text FROM blackboard_attributes attr WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = "
+                + BlackboardAttribute.Type.TSK_KEYWORD_REGEXP.getTypeID() + " LIMIT 1) AS regexp_str,\n"
+                + "  (SELECT value_text FROM blackboard_attributes attr WHERE attr.artifact_id = art.artifact_id AND attr.attribute_type_id = "
+                + BlackboardAttribute.Type.TSK_KEYWORD.getTypeID() + " LIMIT 1) AS keyword\n"
+                + "  FROM blackboard_artifacts art\n"
+                + "  WHERE art.artifact_type_id = " + BlackboardArtifact.Type.TSK_KEYWORD_HIT.getTypeID() + "\n"
+                + dataSourceClause
+                + ") res\n"
+                + "-- TODO replace\n"
+                + "WHERE " + setNameClause + "\n"
+                + "AND res.regexp_str = ?\n"
+                + "AND res.search_type = ?\n"
+                + "GROUP BY keyword";
+
+        try (CaseDbPreparedStatement preparedStatement = getCase().getCaseDbAccessManager().prepareSelect(query)) {
+            // get artifact types and counts
+            int paramIdx = 0;
+            if (dataSourceId != null) {
+                preparedStatement.setLong(++paramIdx, dataSourceId);
+            }
+
+            if (setName != null) {
+                preparedStatement.setString(++paramIdx, setName);
+            }
+
+            preparedStatement.setString(++paramIdx, regexStr);
+            preparedStatement.setInt(++paramIdx, searchType);
+
+            List<TreeItemDTO<KeywordMatchParams>> items = new ArrayList<>();
+            getCase().getCaseDbAccessManager().select(preparedStatement, (resultSet) -> {
+                try {
+                    while (resultSet.next()) {
+                        String keyword = resultSet.getString("keyword");
+                        long count = resultSet.getLong("count");
+
+                        items.add(new TreeItemDTO<>(
+                                "KEYWORD_MATCH",
+                                new KeywordMatchParams(setName, regexStr, keyword, searchType, dataSourceId),
+                                keyword,
+                                keyword == null ? "" : keyword,
+                                count));
+                    }
+                } catch (SQLException ex) {
+                    logger.log(Level.WARNING, "An error occurred while fetching results from result set.", ex);
+                }
+            });
+
+            return new TreeResultsDTO<>(items);
+        } catch (NoCurrentCaseException | TskCoreException | SQLException ex) {
+            throw new ExecutionException("An error occurred while fetching keyword counts", ex);
+        }
+    }
+
     /**
      * Handles basic functionality of fetching and paging of analysis results.
      */
@@ -500,20 +730,20 @@ public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hard
     /**
      * Handles fetching and paging of hashset hits.
      */
-    public static class HashsetResultFetcher extends AbstractAnalysisResultFetcher<HashHitSearchParam> {
+    public static class AnalysisResultSetFetcher extends AbstractAnalysisResultFetcher<AnalysisResultSetSearchParam> {
 
         /**
          * Main constructor.
          *
          * @param params Parameters to handle fetching of data.
          */
-        public HashsetResultFetcher(HashHitSearchParam params) {
+        public AnalysisResultSetFetcher(AnalysisResultSetSearchParam params) {
             super(params);
         }
 
         @Override
         public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
-            return MainDAO.getInstance().getAnalysisResultDAO().getHashHitsForTable(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
+            return MainDAO.getInstance().getAnalysisResultDAO().getAnalysisResultSetHits(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
         }
     }
 
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED
index 602b270a71c4d9d87c94765603e07072329431b7..ab71f5d900b4620532957774f57741b2ada412c3 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/Bundle.properties-MERGED
@@ -13,6 +13,12 @@ AnalysisResultDAO.columnKeys.score.name=Score
 AnalysisResultDAO.columnKeys.sourceType.description=Source Type
 AnalysisResultDAO.columnKeys.sourceType.displayName=Source Type
 AnalysisResultDAO.columnKeys.sourceType.name=SourceType
+# {0} - searchTerm
+AnalysisResultDAO_getKeywordSearchTermCounts_exactMatch={0} (Exact)
+# {0} - searchTerm
+AnalysisResultDAO_getKeywordSearchTermCounts_regexMatch={0} (Regex)
+# {0} - searchTerm
+AnalysisResultDAO_getKeywordSearchTermCounts_substringMatch={0} (Substring)
 BlackboardArtifactDAO.columnKeys.comment.description=Comment
 BlackboardArtifactDAO.columnKeys.comment.displayName=C
 BlackboardArtifactDAO.columnKeys.comment.name=Comment
@@ -81,6 +87,17 @@ FileSystemColumnUtils.volumeColumns.length=Length in Sectors
 FileSystemColumnUtils.volumeColumns.startingSector=Starting Sector
 FileTag.name.text=File Tag
 FileTypesByMimeType.name.text=By MIME Type
+OsAccounts.name.text=OS Accounts
+OsAccountsDAO.createSheet.comment.displayName=C
+OsAccountsDAO.createSheet.count.displayName=O
+OsAccountsDAO.createSheet.score.displayName=S
+OsAccountsDAO.fileColumns.noDescription=No Description
+OsAccountsDAO_accountHostNameProperty_displayName=Host
+OsAccountsDAO_accountNameProperty_displayName=Name
+OsAccountsDAO_accountRealmNameProperty_displayName=Realm Name
+OsAccountsDAO_accountScopeNameProperty_displayName=Scope
+OsAccountsDAO_createdTimeProperty_displayName=Creation Time
+OsAccountsDAO_loginNameProperty_displayName=Login Name
 ResultTag.name.text=Result Tag
 TagsDAO.fileColumns.accessTimeColLbl=Accessed Time
 TagsDAO.fileColumns.changeTimeColLbl=Changed Time
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java
index 9801467ec65299ca4ab229b93dbdc0a0b2daf8bd..c9bc49c5e64b05af91c42985d3506845ac2e4f35 100755
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/FileTypeSizeSearchParams.java
@@ -26,17 +26,21 @@
 public class FileTypeSizeSearchParams {
 
     public enum FileSizeFilter {
-        SIZE_50_200(0, "SIZE_50_200", "50 - 200MB"), //NON-NLS
-        SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB"), //NON-NLS
-        SIZE_1000_(2, "SIZE_1000+", "1GB+"); //NON-NLS
+        SIZE_50_200(0, "SIZE_50_200", "50 - 200MB", 50_000_000L, 200_000_000L), //NON-NLS
+        SIZE_200_1000(1, "SIZE_200_1GB", "200MB - 1GB", 200_000_000L, 1_000_000_000L), //NON-NLS
+        SIZE_1000_(2, "SIZE_1000+", "1GB+", 1_000_000_000L, null); //NON-NLS
         private final int id;
         private final String name;
         private final String displayName;
+        private long minBound;
+        private Long maxBound;
 
-        private FileSizeFilter(int id, String name, String displayName) {
+        private FileSizeFilter(int id, String name, String displayName, long minBound, Long maxBound) {
             this.id = id;
             this.name = name;
             this.displayName = displayName;
+            this.minBound = minBound;
+            this.maxBound = maxBound;
         }
 
         public String getName() {
@@ -50,11 +54,26 @@ public int getId() {
         public String getDisplayName() {
             return this.displayName;
         }
+
+        /**
+         * @return The minimum inclusive bound (non-null).
+         */
+        public long getMinBound() {
+            return minBound;
+        }
+
+        /**
+         * @return The maximum exclusive bound (if null, no upper limit).
+         */
+        public Long getMaxBound() {
+            return maxBound;
+        }
+
     }
 
     private final FileSizeFilter sizeFilter;
     private final Long dataSourceId;
- 
+
     public FileTypeSizeSearchParams(FileSizeFilter sizeFilter, Long dataSourceId) {
         this.sizeFilter = sizeFilter;
         this.dataSourceId = dataSourceId;
@@ -96,6 +115,5 @@ public boolean equals(Object obj) {
         }
         return true;
     }
-    
-    
+
 }
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordMatchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordMatchParams.java
new file mode 100644
index 0000000000000000000000000000000000000000..13519394234f7612d2798027ccc805a9223d624c
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordMatchParams.java
@@ -0,0 +1,83 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.mainui.datamodel;
+
+/**
+ * Parameters for a keyword match found in files.
+ */
+public class KeywordMatchParams {
+
+    private final String setName;
+    private final String searchTerm;
+    private final String keywordMatch;
+    private final Long dataSourceId;
+    private final int searchType;
+
+    /**
+     * Main constructor.
+     *
+     * @param setName      The set name.
+     * @param searchTerm   The search term (determined from regex or keyword).
+     * @param keywordMatch The actual keyword match.
+     * @param searchType   The keyword search type.
+     * @param dataSourceId The data source id or null.
+     */
+    public KeywordMatchParams(String setName, String searchTerm, String keywordMatch, int searchType, Long dataSourceId) {
+        this.setName = setName;
+        this.searchTerm = searchTerm;
+        this.keywordMatch = keywordMatch;
+        this.searchType = searchType;
+        this.dataSourceId = dataSourceId;
+    }
+
+    /**
+     * @return The set name.
+     */
+    public String getSetName() {
+        return setName;
+    }
+
+    /**
+     * @return The search term (determined from regex or keyword).
+     */
+    public String getSearchTerm() {
+        return searchTerm;
+    }
+
+    /**
+     * @return The actual keyword match.
+     */
+    public String getKeywordMatch() {
+        return keywordMatch;
+    }
+
+    /**
+     * @return The data source id or null.
+     */
+    public Long getDataSourceId() {
+        return dataSourceId;
+    }
+
+    /**
+     * @return The type of keyword search performed.
+     */
+    public int getSearchType() {
+        return searchType;
+    }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordSearchTermParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordSearchTermParams.java
new file mode 100644
index 0000000000000000000000000000000000000000..c22a6c97b9b863b85c184b47575409a616303c52
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/KeywordSearchTermParams.java
@@ -0,0 +1,86 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.mainui.datamodel;
+
/**
 * Parameters for a keyword search term.
 *
 * Instances are used as (part of) search-parameter cache keys, so this class
 * defines value-based equals/hashCode (consistent with the other
 * *SearchParams classes in this package).
 */
public class KeywordSearchTermParams {

    private final String setName;
    private final String searchTerm;
    private final boolean hasChildren;
    private final Long dataSourceId;
    private final int searchType;

    /**
     * Main constructor.
     *
     * @param setName      The set name.
     * @param searchTerm   The search term (determined from regex or keyword).
     * @param searchType   The keyword search type attribute.
     * @param hasChildren  Whether or not this search term has children tree
     *                     nodes (i.e. url regex search that further divides
     *                     into different urls).
     * @param dataSourceId The data source id or null.
     */
    public KeywordSearchTermParams(String setName, String searchTerm, int searchType, boolean hasChildren, Long dataSourceId) {
        this.setName = setName;
        this.searchTerm = searchTerm;
        this.searchType = searchType;
        this.hasChildren = hasChildren;
        this.dataSourceId = dataSourceId;
    }

    /**
     * @return The set name.
     */
    public String getSetName() {
        return setName;
    }

    /**
     * @return The search term (determined from regex or keyword).
     */
    public String getSearchTerm() {
        return searchTerm;
    }

    /**
     * @return Whether or not this search term has children tree nodes (i.e. url
     *         regex search that further divides into different urls).
     */
    public boolean hasChildren() {
        return hasChildren;
    }

    /**
     * @return The data source id or null.
     */
    public Long getDataSourceId() {
        return dataSourceId;
    }

    /**
     * @return The keyword search type value.
     */
    public int getSearchType() {
        return searchType;
    }

    @Override
    public int hashCode() {
        // value-based hash so this object works as a cache key
        return java.util.Objects.hash(setName, searchTerm, searchType, hasChildren, dataSourceId);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        KeywordSearchTermParams other = (KeywordSearchTermParams) obj;
        return this.searchType == other.searchType
                && this.hasChildren == other.hasChildren
                && java.util.Objects.equals(this.setName, other.setName)
                && java.util.Objects.equals(this.searchTerm, other.searchTerm)
                && java.util.Objects.equals(this.dataSourceId, other.dataSourceId);
    }
}
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java
index de0e4b5dddaed8812c5ae15b7845e1fffcf370d6..81a21ef749d40e5e93bb5efe2b6bf791bc347c59 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/MainDAO.java
@@ -39,6 +39,7 @@ public synchronized static MainDAO getInstance() {
     private final ViewsDAO viewsDAO = ViewsDAO.getInstance();
     private final FileSystemDAO fileSystemDAO = FileSystemDAO.getInstance();
     private final TagsDAO tagsDAO = TagsDAO.getInstance();
+    private final OsAccountsDAO accountsDAO = OsAccountsDAO.getInstance();
 
     public DataArtifactDAO getDataArtifactsDAO() {
         return dataArtifactDAO;
@@ -59,4 +60,8 @@ public FileSystemDAO getFileSystemDAO() {
     public TagsDAO getTagsDAO() {
         return tagsDAO;
     }
+    
    /**
     * @return The DAO providing OS accounts data for the results viewer.
     */
    public OsAccountsDAO getOsAccountsDAO() {
        return accountsDAO;
    }
 }
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java
new file mode 100755
index 0000000000000000000000000000000000000000..a5d34088fea4d90512f314ae61b888a63f9289c6
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsDAO.java
@@ -0,0 +1,198 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.mainui.datamodel;
+
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
+import java.beans.PropertyChangeEvent;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Optional;
+import java.util.concurrent.ExecutionException;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.openide.util.NbBundle;
+import org.openide.util.NbBundle.Messages;
+import org.sleuthkit.autopsy.casemodule.Case;
+import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
+import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
+import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
+import org.sleuthkit.datamodel.OsAccount;
+import org.sleuthkit.datamodel.TskCoreException;
+
+/**
+ * Provides information to populate the results viewer for data in the OS
+ * Accounts section.
+ */
+@Messages({
+    "OsAccountsDAO_accountNameProperty_displayName=Name",
+    "OsAccountsDAO_accountRealmNameProperty_displayName=Realm Name",
+    "OsAccountsDAO_accountHostNameProperty_displayName=Host",
+    "OsAccountsDAO_accountScopeNameProperty_displayName=Scope",
+    "OsAccountsDAO_createdTimeProperty_displayName=Creation Time",
+    "OsAccountsDAO_loginNameProperty_displayName=Login Name",
+    "OsAccountsDAO.createSheet.score.displayName=S",
+    "OsAccountsDAO.createSheet.comment.displayName=C",
+    "OsAccountsDAO.createSheet.count.displayName=O",
+    "OsAccountsDAO.fileColumns.noDescription=No Description",})
+public class OsAccountsDAO {
+
+    private static final int CACHE_SIZE = 5; // rule of thumb: 5 entries times number of cached SearchParams sub-types
+    private static final long CACHE_DURATION = 2;
+    private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
+    private final Cache<SearchParams<?>, SearchResultsDTO> searchParamsCache = CacheBuilder.newBuilder().maximumSize(CACHE_SIZE).expireAfterAccess(CACHE_DURATION, CACHE_DURATION_UNITS).build();
+
+    private static final String OS_ACCOUNTS_TYPE_ID = "OS_ACCOUNTS";
+
+    private static final List<ColumnKey> OS_ACCOUNTS_WITH_SCO_COLUMNS = Arrays.asList(
+            getFileColumnKey(Bundle.OsAccountsDAO_accountNameProperty_displayName()),
+            getFileColumnKey(Bundle.OsAccountsDAO_createSheet_score_displayName()),
+            getFileColumnKey(Bundle.OsAccountsDAO_createSheet_comment_displayName()),
+            getFileColumnKey(Bundle.OsAccountsDAO_createSheet_count_displayName()),
+            getFileColumnKey(Bundle.OsAccountsDAO_loginNameProperty_displayName()),
+            getFileColumnKey(Bundle.OsAccountsDAO_accountHostNameProperty_displayName()),
+            getFileColumnKey(Bundle.OsAccountsDAO_accountScopeNameProperty_displayName()),
+            getFileColumnKey(Bundle.OsAccountsDAO_accountRealmNameProperty_displayName()),
+            getFileColumnKey(Bundle.OsAccountsDAO_createdTimeProperty_displayName()));
+
+    private static OsAccountsDAO instance = null;
+
+    synchronized static OsAccountsDAO getInstance() {
+        if (instance == null) {
+            instance = new OsAccountsDAO();
+        }
+
+        return instance;
+    }
+
+    private static ColumnKey getFileColumnKey(String name) {
+        return new ColumnKey(name, name, Bundle.OsAccountsDAO_fileColumns_noDescription());
+    }
+
+    public SearchResultsDTO getAccounts(OsAccountsSearchParams key, long startItem, Long maxCount, boolean hardRefresh) throws ExecutionException, IllegalArgumentException {
+        if (key == null) {
+            throw new IllegalArgumentException("Search parameters are null");
+        } else if (key.getDataSourceId() != null && key.getDataSourceId() <= 0) {
+            throw new IllegalArgumentException("Data source id must be greater than 0 or null");
+        }
+
+        SearchParams<OsAccountsSearchParams> searchParams = new SearchParams<>(key, startItem, maxCount);
+        if (hardRefresh) {
+            this.searchParamsCache.invalidate(searchParams);
+        }
+
+        return searchParamsCache.get(searchParams, () -> fetchAccountsDTOs(searchParams));
+    }
+
+    /**
+     * Returns a list of paged OS Accounts results.
+     *
+     * @param accounts     The OS Accounts results.
+     * @param searchParams The search parameters including the paging.
+     *
+     * @return The list of paged OS Accounts results.
+     */
+    List<OsAccount> getPaged(List<OsAccount> accounts, SearchParams<?> searchParams) {
+        Stream<OsAccount> pagedAccountsStream = accounts.stream()
+                .sorted(Comparator.comparing((acct) -> acct.getId()))
+                .skip(searchParams.getStartItem());
+
+        if (searchParams.getMaxResultsCount() != null) {
+            pagedAccountsStream = pagedAccountsStream.limit(searchParams.getMaxResultsCount());
+        }
+
+        return pagedAccountsStream.collect(Collectors.toList());
+    }
+
+    @NbBundle.Messages({"OsAccounts.name.text=OS Accounts"})
+    private SearchResultsDTO fetchAccountsDTOs(SearchParams<OsAccountsSearchParams> cacheKey) throws NoCurrentCaseException, TskCoreException {
+
+        Long dataSourceId = cacheKey.getParamData().getDataSourceId();
+
+        // get all accounts
+        List<OsAccount> allAccounts = (dataSourceId != null && dataSourceId > 0)
+                ? Case.getCurrentCaseThrows().getSleuthkitCase().getOsAccountManager().getOsAccountsByDataSourceObjId(dataSourceId)
+                : Case.getCurrentCaseThrows().getSleuthkitCase().getOsAccountManager().getOsAccounts();
+
+        // get current page of accounts results
+        List<OsAccount> pagedAccounts = getPaged(allAccounts, cacheKey);
+
+        List<RowDTO> fileRows = new ArrayList<>();
+        for (OsAccount account : pagedAccounts) {
+
+            Optional<String> optional = account.getLoginName();
+            Optional<Long> creationTimeValue = account.getCreationTime();
+            String timeDisplayStr
+                    = creationTimeValue.isPresent() ? TimeZoneUtils.getFormattedTime(creationTimeValue.get()) : "";
+            List<Object> cellValues = Arrays.asList(
+                    account.getName() != null ? account.getName() : "",
+                    // GVDTODO handle SCO
+                    // GVDTODO only show if (!UserPreferences.getHideSCOColumns())
+                    null,
+                    null,
+                    // GVDTODO only show if central repository enabled
+                    null,
+                    optional.isPresent() ? optional.get() : "",
+                    "",
+                    "",
+                    "", // GVDTODO this is filled by a background GetOsAccountRealmTask task 
+                    timeDisplayStr);
+
+            fileRows.add(new BaseRowDTO(
+                    cellValues,
+                    OS_ACCOUNTS_TYPE_ID,
+                    account.getId()));
+        };
+
+        return new BaseSearchResultsDTO(OS_ACCOUNTS_TYPE_ID, Bundle.OsAccounts_name_text(), OS_ACCOUNTS_WITH_SCO_COLUMNS, fileRows, 0, allAccounts.size());
+    }
+
+    /**
+     * Handles fetching and paging of data for accounts.
+     */
+    public static class AccountFetcher extends DAOFetcher<OsAccountsSearchParams> {
+
+        /**
+         * Main constructor.
+         *
+         * @param params Parameters to handle fetching of data.
+         */
+        public AccountFetcher(OsAccountsSearchParams params) {
+            super(params);
+        }
+
+        @Override
+        public SearchResultsDTO getSearchResults(int pageSize, int pageIdx, boolean hardRefresh) throws ExecutionException {
+            return MainDAO.getInstance().getOsAccountsDAO().getAccounts(this.getParameters(), pageIdx * pageSize, (long) pageSize, hardRefresh);
+        }
+
+        @Override
+        public boolean isRefreshRequired(PropertyChangeEvent evt) {
+            String eventType = evt.getPropertyName();
+            if (eventType.equals(Case.Events.OS_ACCOUNTS_ADDED.toString())
+                    || eventType.equals(Case.Events.OS_ACCOUNTS_DELETED.toString())) {
+                return true;
+            }
+            return false;
+        }
+    }
+}
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsSearchParams.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsSearchParams.java
new file mode 100755
index 0000000000000000000000000000000000000000..e21db61826d9ef76298b8b365fd58d95f96795c9
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/OsAccountsSearchParams.java
@@ -0,0 +1,62 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.mainui.datamodel;
+
+import java.util.Objects;
+
/**
 * Key for accessing data about OS Accounts from the DAO.
 */
public class OsAccountsSearchParams {

    private final Long dataSourceId;

    /**
     * Main constructor.
     *
     * @param dataSourceId The data source id to filter on, or null for all
     *                     data sources.
     */
    public OsAccountsSearchParams(Long dataSourceId) {
        this.dataSourceId = dataSourceId;
    }

    /**
     * @return The data source id or null.
     */
    public Long getDataSourceId() {
        return dataSourceId;
    }

    @Override
    public int hashCode() {
        // equivalent to the accumulator form: hash = 7; hash = 23 * hash + ...
        return 23 * 7 + Objects.hashCode(this.dataSourceId);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        return Objects.equals(this.dataSourceId, ((OsAccountsSearchParams) obj).dataSourceId);
    }
}
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java
index c5466eb7c09ef9645926e0170704f6c7a4e054b2..6f88d47289f1be375e19912584ddfdf21b774d45 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/datamodel/ViewsDAO.java
@@ -21,26 +21,31 @@
 import com.google.common.cache.Cache;
 import com.google.common.cache.CacheBuilder;
 import java.beans.PropertyChangeEvent;
+import java.sql.SQLException;
 import java.util.ArrayList;
-import java.util.Arrays;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.apache.commons.lang3.StringUtils;
 import org.openide.util.NbBundle;
-import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import static org.sleuthkit.autopsy.core.UserPreferences.hideKnownFilesInViewsTree;
 import static org.sleuthkit.autopsy.core.UserPreferences.hideSlackFilesInViewsTree;
-import org.sleuthkit.autopsy.coreutils.TimeZoneUtils;
 import org.sleuthkit.autopsy.datamodel.FileTypeExtensions;
 import org.sleuthkit.autopsy.mainui.datamodel.FileRowDTO.ExtensionMediaType;
+import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
 import org.sleuthkit.autopsy.mainui.nodes.DAOFetcher;
 import org.sleuthkit.datamodel.AbstractFile;
+import org.sleuthkit.datamodel.CaseDbAccessManager.CaseDbPreparedStatement;
 import org.sleuthkit.datamodel.Content;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
@@ -52,6 +57,8 @@
  */
 public class ViewsDAO {
 
+    private static final Logger logger = Logger.getLogger(ViewsDAO.class.getName());
+
     private static final int CACHE_SIZE = 15; // rule of thumb: 5 entries times number of cached SearchParams sub-types
     private static final long CACHE_DURATION = 2;
     private static final TimeUnit CACHE_DURATION_UNITS = TimeUnit.MINUTES;
@@ -174,59 +181,76 @@ public boolean isFilesBySizeInvalidating(FileTypeSizeSearchParams key, Content e
 
         long size = eventData.getSize();
 
-        switch (key.getSizeFilter()) {
-            case SIZE_50_200:
-                return size >= 50_000_000 && size < 200_000_000;
-            case SIZE_200_1000:
-                return size >= 200_000_000 && size < 1_000_000_000;
-            case SIZE_1000_:
-                return size >= 1_000_000_000;
-            default:
-                throw new IllegalArgumentException("Unsupported filter type to get files by size: " + key.getSizeFilter());
-        }
+        return size >= key.getSizeFilter().getMinBound() && (key.getSizeFilter().getMaxBound() == null || size < key.getSizeFilter().getMaxBound());
     }
 
-//    private ViewFileTableSearchResultsDTO fetchFilesForTable(ViewFileCacheKey cacheKey) throws NoCurrentCaseException, TskCoreException {
-//
-//    }
-//
-//    public ViewFileTableSearchResultsDTO getFilewViewForTable(BlackboardArtifact.Type artType, Long dataSourceId) throws ExecutionException, IllegalArgumentException {
-//        if (artType == null || artType.getCategory() != BlackboardArtifact.Category.DATA_ARTIFACT) {
-//            throw new IllegalArgumentException(MessageFormat.format("Illegal data.  "
-//                    + "Artifact type must be non-null and data artifact.  "
-//                    + "Received {0}", artType));
-//        }
-//
-//        ViewFileCacheKey cacheKey = new ViewFileCacheKey(artType, dataSourceId);
-//        return dataArtifactCache.get(cacheKey, () -> fetchFilesForTable(cacheKey));
-//    }
-    private Map<Integer, Long> fetchFileViewCounts(List<FileExtSearchFilter> filters, Long dataSourceId) throws NoCurrentCaseException, TskCoreException {
-        Map<Integer, Long> counts = new HashMap<>();
-        for (FileExtSearchFilter filter : filters) {
-            String whereClause = getFileExtensionWhereStatement(filter, dataSourceId);
-            long count = getCase().countFilesWhere(whereClause);
-            counts.put(filter.getId(), count);
-        }
+    /**
+     * Returns a sql 'and' clause to filter by data source id if one is present.
+     *
+     * @param dataSourceId The data source id or null.
+     *
+     * @return Returns clause if data source id is present or blank string if
+     *         not.
+     */
+    private static String getDataSourceAndClause(Long dataSourceId) {
+        return (dataSourceId != null && dataSourceId > 0
+                ? " AND data_source_obj_id = " + dataSourceId
+                : " ");
+    }
+
+    /**
+     * Returns clause that will determine if file extension is within the
+     * filter's set of extensions.
+     *
+     * @param filter The filter.
+     *
+     * @return The sql clause that will need to be proceeded with 'where' or
+     *         'and'.
+     */
+    private static String getFileExtensionClause(FileExtSearchFilter filter) {
+        return "extension IN (" + filter.getFilter().stream()
+                .map(String::toLowerCase)
+                .map(s -> "'" + StringUtils.substringAfter(s, ".") + "'")
+                .collect(Collectors.joining(", ")) + ")";
+    }
 
-        return counts;
+    /**
+     * Returns a clause that will filter out files that aren't to be counted in
+     * the file extensions view.
+     *
+     * @return The filter that will need to be proceeded with 'where' or 'and'.
+     */
+    private String getBaseFileExtensionFilter() {
+        return "(dir_type = " + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")"
+                + (hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known <> " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")") : "");
     }
 
+    /**
+     * Returns a statement to be proceeded with 'where' or 'and' that will
+     * filter results to the provided filter and data source id (if non null).
+     *
+     * @param filter       The file extension filter.
+     * @param dataSourceId The data source id or null if no data source
+     *                     filtering is to occur.
+     *
+     * @return The sql statement to be proceeded with 'and' or 'where'.
+     */
     private String getFileExtensionWhereStatement(FileExtSearchFilter filter, Long dataSourceId) {
-        String whereClause = "(dir_type = " + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")"
-                + (hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")") : "")
-                + (dataSourceId != null && dataSourceId > 0
-                        ? " AND data_source_obj_id = " + dataSourceId
-                        : " ")
-                + " AND (extension IN (" + filter.getFilter().stream()
-                        .map(String::toLowerCase)
-                        .map(s -> "'" + StringUtils.substringAfter(s, ".") + "'")
-                        .collect(Collectors.joining(", ")) + "))";
+        String whereClause = getBaseFileExtensionFilter()
+                + getDataSourceAndClause(dataSourceId)
+                + " AND (" + getFileExtensionClause(filter) + ")";
         return whereClause;
     }
 
-    private String getFileMimeWhereStatement(String mimeType, Long dataSourceId) {
-
-        String whereClause = "(dir_type = " + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")"
+    /**
+     * Returns a statement to be proceeded with 'where' or 'and' that will
+     * filter out results that should not be viewed in mime types view.
+     *
+     * @return A statement to be proceeded with 'and' or 'where'.
+     */
+    private String getBaseFileMimeFilter() {
+        return "(dir_type = " + TskData.TSK_FS_NAME_TYPE_ENUM.REG.getValue() + ")"
+                + (hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")") : "")
                 + " AND (type IN ("
                 + TskData.TSK_DB_FILES_TYPE_ENUM.FS.ordinal() + ","
                 + TskData.TSK_DB_FILES_TYPE_ENUM.CARVED.ordinal() + ","
@@ -234,44 +258,331 @@ private String getFileMimeWhereStatement(String mimeType, Long dataSourceId) {
                 + TskData.TSK_DB_FILES_TYPE_ENUM.LAYOUT_FILE.ordinal() + ","
                 + TskData.TSK_DB_FILES_TYPE_ENUM.LOCAL.ordinal()
                 + (hideSlackFilesInViewsTree() ? "" : ("," + TskData.TSK_DB_FILES_TYPE_ENUM.SLACK.ordinal()))
-                + "))"
-                + (dataSourceId != null && dataSourceId > 0 ? " AND data_source_obj_id = " + dataSourceId : " ")
-                + (hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")") : "")
-                + " AND mime_type = '" + mimeType + "'";
+                + "))";
+    }
 
+    /**
+     * Returns a sql statement to be proceeded with 'where' or 'and' that will
+     * filter to the specified mime type.
+     *
+     * @param mimeType     The mime type.
+     * @param dataSourceId The data source object id or null if no data source
+     *                     filtering is to occur.
+     *
+     * @return A statement to be proceeded with 'and' or 'where'.
+     */
+    private String getFileMimeWhereStatement(String mimeType, Long dataSourceId) {
+        String whereClause = getBaseFileMimeFilter()
+                + getDataSourceAndClause(dataSourceId)
+                + " AND mime_type = '" + mimeType + "'";
         return whereClause;
     }
 
-    private static String getFileSizesWhereStatement(FileTypeSizeSearchParams.FileSizeFilter filter, Long dataSourceId) {
-        String query;
-        switch (filter) {
-            case SIZE_50_200:
-                query = "(size >= 50000000 AND size < 200000000)"; //NON-NLS
-                break;
-            case SIZE_200_1000:
-                query = "(size >= 200000000 AND size < 1000000000)"; //NON-NLS
-                break;
-
-            case SIZE_1000_:
-                query = "(size >= 1000000000)"; //NON-NLS
-                break;
-
-            default:
-                throw new IllegalArgumentException("Unsupported filter type to get files by size: " + filter); //NON-NLS
-        }
+    /**
+     * Returns clause to be proceeded with 'where' or 'and' to filter files to
+     * those within the bounds of the filter.
+     *
+     * @param filter The size filter.
+     *
+     * @return The clause to be proceeded with 'where' or 'and'.
+     */
+    private static String getFileSizeClause(FileTypeSizeSearchParams.FileSizeFilter filter) {
+        return filter.getMaxBound() == null
+                ? "(size >= " + filter.getMinBound() + ")"
+                : "(size >= " + filter.getMinBound() + " AND size < " + filter.getMaxBound() + ")";
+    }
 
+    /**
+     * The filter for all files to remove those that should never be seen in the
+     * file size views.
+     *
+     * @return The clause to be proceeded with 'where' or 'and'.
+     */
+    private String getBaseFileSizeFilter() {
         // Ignore unallocated block files.
-        query += " AND (type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType() + ")"; //NON-NLS
+        return "(type != " + TskData.TSK_DB_FILES_TYPE_ENUM.UNALLOC_BLOCKS.getFileType() + ")"
+                + ((hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")") : "")); //NON-NLS
+    }
 
-        // hide known files if specified by configuration
-        query += (hideKnownFilesInViewsTree() ? (" AND (known IS NULL OR known != " + TskData.FileKnown.KNOWN.getFileKnownValue() + ")") : ""); //NON-NLS
+    /**
+     * Creates a clause to be proceeded with 'where' or 'and' that will show
+     * files specified by the filter and the specified data source.
+     *
+     * @param filter       The file size filter.
+     * @param dataSourceId The id of the data source or null if no data source
+     *                     filtering.
+     *
+     * @return The clause to be proceeded with 'where' or 'and'.
+     */
+    private String getFileSizesWhereStatement(FileTypeSizeSearchParams.FileSizeFilter filter, Long dataSourceId) {
+        String query = getBaseFileSizeFilter()
+                + " AND " + getFileSizeClause(filter)
+                + getDataSourceAndClause(dataSourceId);
 
-        // filter by datasource if indicated in case preferences
-        if (dataSourceId != null && dataSourceId > 0) {
-            query += " AND data_source_obj_id = " + dataSourceId;
+        return query;
+    }
+
+    /**
+     * Returns counts for a collection of file extension search filters.
+     *
+     * @param filters      The filters. Each one will have an entry in the
+     *                     returned results.
+     * @param dataSourceId The data source object id or null if no data source
+     *                     filtering should occur.
+     *
+     * @return The results.
+     *
+     * @throws IllegalArgumentException
+     * @throws ExecutionException
+     */
+    public TreeResultsDTO<FileTypeExtensionsSearchParams> getFileExtCounts(Collection<FileExtSearchFilter> filters, Long dataSourceId) throws IllegalArgumentException, ExecutionException {
+        Map<FileExtSearchFilter, String> whereClauses = filters.stream()
+                .collect(Collectors.toMap(
+                        filter -> filter,
+                        filter -> getFileExtensionClause(filter)));
+
+        Map<FileExtSearchFilter, Long> countsByFilter = getFilesCounts(whereClauses, getBaseFileExtensionFilter(), dataSourceId, true);
+
+        List<TreeItemDTO<FileTypeExtensionsSearchParams>> treeList = countsByFilter.entrySet().stream()
+                .map(entry -> {
+                    return new TreeItemDTO<>(
+                            "FILE_EXT",
+                            new FileTypeExtensionsSearchParams(entry.getKey(), dataSourceId),
+                            entry.getKey(),
+                            entry.getKey().getDisplayName(),
+                            entry.getValue());
+                })
+                .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName()))
+                .collect(Collectors.toList());
+
+        return new TreeResultsDTO<>(treeList);
+    }
+
+    /**
+     * Returns counts for file size categories.
+     *
+     * @param dataSourceId The data source object id or null if no data source
+     *                     filtering should occur.
+     *
+     * @return The results.
+     *
+     * @throws IllegalArgumentException
+     * @throws ExecutionException
+     */
+    public TreeResultsDTO<FileTypeSizeSearchParams> getFileSizeCounts(Long dataSourceId) throws IllegalArgumentException, ExecutionException {
+        Map<FileTypeSizeSearchParams.FileSizeFilter, String> whereClauses = Stream.of(FileTypeSizeSearchParams.FileSizeFilter.values())
+                .collect(Collectors.toMap(
+                        filter -> filter,
+                        filter -> getFileSizeClause(filter)));
+
+        Map<FileTypeSizeSearchParams.FileSizeFilter, Long> countsByFilter = getFilesCounts(whereClauses, getBaseFileSizeFilter(), dataSourceId, true);
+
+        List<TreeItemDTO<FileTypeSizeSearchParams>> treeList = countsByFilter.entrySet().stream()
+                .map(entry -> {
+                    return new TreeItemDTO<>(
+                            "FILE_SIZE",
+                            new FileTypeSizeSearchParams(entry.getKey(), dataSourceId),
+                            entry.getKey(),
+                            entry.getKey().getDisplayName(),
+                            entry.getValue());
+                })
+                .sorted((a, b) -> a.getDisplayName().compareToIgnoreCase(b.getDisplayName()))
+                .collect(Collectors.toList());
+
+        return new TreeResultsDTO<>(treeList);
+    }
+
+    /**
+     * Returns counts for file mime type categories.
+     *
+     * @param prefix       The prefix mime type (i.e. 'application', 'audio').
+     *                     If null, prefix counts are gathered.
+     * @param dataSourceId The data source object id or null if no data source
+     *                     filtering should occur.
+     *
+     * @return The results.
+     *
+     * @throws IllegalArgumentException
+     * @throws ExecutionException
+     */
+    public TreeResultsDTO<FileTypeMimeSearchParams> getFileMimeCounts(String prefix, Long dataSourceId) throws IllegalArgumentException, ExecutionException {
+        String prefixWithSlash = StringUtils.isNotBlank(prefix) ? prefix.replaceAll("/", "") + "/" : null;
+        String likeItem = StringUtils.isNotBlank(prefixWithSlash) ? prefixWithSlash.replaceAll("%", "") + "%" : null;
+
+        String baseFilter = "WHERE " + getBaseFileMimeFilter()
+                + getDataSourceAndClause(dataSourceId)
+                + (StringUtils.isNotBlank(prefix) ? " AND mime_type LIKE ? " : " AND mime_type IS NOT NULL ");
+
+        try {
+            SleuthkitCase skCase = getCase();
+            String mimeType;
+            if (StringUtils.isNotBlank(prefix)) {
+                mimeType = "mime_type";
+            } else {
+                switch (skCase.getDatabaseType()) {
+                    case POSTGRESQL:
+                        mimeType = "SPLIT_PART(mime_type, '/', 1)";
+                        break;
+                    case SQLITE:
+                        mimeType = "SUBSTR(mime_type, 0, instr(mime_type, '/'))";
+                        break;
+                    default:
+                        throw new IllegalArgumentException("Unknown database type: " + skCase.getDatabaseType());
+                }
+            }
+
+            String query = mimeType + " AS mime_type, COUNT(*) AS count\n"
+                    + "FROM tsk_files\n"
+                    + baseFilter + "\n"
+                    + "GROUP BY " + mimeType;
+
+            Map<String, Long> typeCounts = new HashMap<>();
+
+            try (CaseDbPreparedStatement casePreparedStatement = skCase.getCaseDbAccessManager().prepareSelect(query)) {
+
+                if (likeItem != null) {
+                    casePreparedStatement.setString(1, likeItem);
+                }
+
+                skCase.getCaseDbAccessManager().select(casePreparedStatement, (resultSet) -> {
+                    try {
+                        while (resultSet.next()) {
+                            String mimeTypeId = resultSet.getString("mime_type");
+                            if (mimeTypeId != null) {
+                                long count = resultSet.getLong("count");
+                                typeCounts.put(mimeTypeId, count);
+                            }
+                        }
+                    } catch (SQLException ex) {
+                        logger.log(Level.WARNING, "An error occurred while fetching file mime type counts.", ex);
+                    }
+                });
+
+                List<TreeItemDTO<FileTypeMimeSearchParams>> treeList = typeCounts.entrySet().stream()
+                        .map(entry -> {
+                            String name = prefixWithSlash != null && entry.getKey().startsWith(prefixWithSlash)
+                                    ? entry.getKey().substring(prefixWithSlash.length())
+                                    : entry.getKey();
+
+                            return new TreeItemDTO<>(
+                                    "FILE_MIME_TYPE",
+                                    new FileTypeMimeSearchParams(entry.getKey(), dataSourceId),
+                                    name,
+                                    name,
+                                    entry.getValue());
+                        })
+                        .sorted((a, b) -> stringCompare(a.getTypeData().getMimeType(), b.getTypeData().getMimeType()))
+                        .collect(Collectors.toList());
+
+                return new TreeResultsDTO<>(treeList);
+            } catch (TskCoreException | SQLException ex) {
+                throw new ExecutionException("An error occurred while fetching file counts with query:\n" + query, ex);
+            }
+        } catch (NoCurrentCaseException ex) {
+            throw new ExecutionException("An error occurred while fetching file counts.", ex);
         }
+    }
 
-        return query;
+    /**
+     * Compares two possibly-null strings, ignoring case, in the manner of a
+     * Comparator.
+     *
+     * @param a A string that may be null.
+     * @param b A string that may be null.
+     *
+     * @return The comparison result, ordering null values first.
+     */
+    private int stringCompare(String a, String b) {
+        if (a == null && b == null) {
+            return 0;
+        } else if (a == null) {
+            return -1;
+        } else if (b == null) {
+            return 1;
+        } else {
+            return a.compareToIgnoreCase(b);
+        }
+    }
+
+    /**
+     * Determines counts for files in multiple categories.
+     *
+     * @param whereClauses     A mapping of objects to their respective where
+     *                         clauses.
+     * @param baseFilter       A filter for files applied before performing
+     *                         groupings and counts. It shouldn't have a leading
+     *                         'AND' or 'WHERE'.
+     * @param dataSourceId     The data source object id or null if no data
+     *                         source filtering.
+     * @param includeZeroCount Whether or not to return an item if there are 0
+     *                         matches.
+     *
+     * @return A mapping of the keys in the 'whereClauses' mapping to their
+     *         respective counts.
+     *
+     * @throws ExecutionException
+     */
+    private <T> Map<T, Long> getFilesCounts(Map<T, String> whereClauses, String baseFilter, Long dataSourceId, boolean includeZeroCount) throws ExecutionException {
+        // assign an index to each filter key and build CASE WHEN clauses for a single grouped file-count query
+
+        Map<Integer, T> types = new HashMap<>();
+        String whenClauses = "";
+
+        int idx = 0;
+        for (Entry<T, String> e : whereClauses.entrySet()) {
+            types.put(idx, e.getKey());
+            whenClauses += "    WHEN " + e.getValue() + " THEN " + idx + " \n";
+            idx++;
+        }
+
+        String switchStatement = "  CASE \n"
+                + whenClauses
+                + "    ELSE -1 \n"
+                + "  END AS type_id \n";
+
+        String dataSourceClause = dataSourceId != null && dataSourceId > 0 ? "data_source_obj_id = " + dataSourceId : null;
+
+        String baseWhereClauses = Stream.of(dataSourceClause, baseFilter)
+                .filter(s -> StringUtils.isNotBlank(s))
+                .collect(Collectors.joining(" AND "));
+
+        String query = "res.type_id, COUNT(*) AS count FROM \n"
+                + "(SELECT \n"
+                + switchStatement
+                + "FROM tsk_files \n"
+                + (baseWhereClauses != null ? ("WHERE " + baseWhereClauses) : "") + ") res \n"
+                + "WHERE res.type_id >= 0 \n"
+                + "GROUP BY res.type_id";
+
+        Map<T, Long> typeCounts = new HashMap<>();
+        try {
+            SleuthkitCase skCase = getCase();
+
+            skCase.getCaseDbAccessManager().select(query, (resultSet) -> {
+                try {
+                    while (resultSet.next()) {
+                        int typeIdx = resultSet.getInt("type_id");
+                        T type = types.remove(typeIdx);
+                        if (type != null) {
+                            long count = resultSet.getLong("count");
+                            typeCounts.put(type, count);
+                        }
+                    }
+                } catch (SQLException ex) {
+                    logger.log(Level.WARNING, "An error occurred while fetching file type counts.", ex);
+                }
+            });
+        } catch (NoCurrentCaseException | TskCoreException ex) {
+            throw new ExecutionException("An error occurred while fetching file counts with query:\n" + query, ex);
+        }
+
+        if (includeZeroCount) {
+            for (T remaining : types.values()) {
+                typeCounts.put(remaining, 0L);
+            }
+        }
+
+        return typeCounts;
     }
 
     private SearchResultsDTO fetchExtensionSearchResultsDTOs(FileExtSearchFilter filter, Long dataSourceId, long startItem, Long maxResultCount) throws NoCurrentCaseException, TskCoreException {
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java
index 4c43fcb5a75a82cc76d40547a6acb2ce4e1aec61..60459768d11a012c9cf3875d57503cc0658d82f4 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/AnalysisResultTypeFactory.java
@@ -18,10 +18,15 @@
  */
 package org.sleuthkit.autopsy.mainui.nodes;
 
+import org.sleuthkit.autopsy.mainui.datamodel.KeywordSearchTermParams;
+import org.sleuthkit.autopsy.mainui.datamodel.KeywordMatchParams;
 import com.google.common.collect.ImmutableSet;
 import java.beans.PropertyChangeEvent;
 import java.util.Set;
 import java.util.concurrent.ExecutionException;
+import org.openide.nodes.ChildFactory;
+import org.openide.nodes.Children;
+import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.casemodule.NoCurrentCaseException;
 import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
@@ -30,8 +35,11 @@
 import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
 import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultDAO;
 import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSearchParam;
+import org.sleuthkit.autopsy.mainui.datamodel.AnalysisResultSetSearchParam;
+import org.sleuthkit.autopsy.mainui.datamodel.KeywordHitSearchParam;
 import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
 import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
+import static org.sleuthkit.autopsy.mainui.nodes.TreeNode.getDefaultLookup;
 import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardArtifact.Category;
 
@@ -39,7 +47,7 @@
  * Factory for displaying analysis result types in the tree.
  */
 public class AnalysisResultTypeFactory extends TreeChildFactory<AnalysisResultSearchParam> {
-    
+
     private static Set<Integer> SET_TREE_ARTIFACTS = ImmutableSet.of(
             BlackboardArtifact.Type.TSK_HASHSET_HIT.getTypeID(),
             BlackboardArtifact.Type.TSK_INTERESTING_ARTIFACT_HIT.getTypeID(),
@@ -76,13 +84,13 @@ protected TreeResultsDTO<? extends AnalysisResultSearchParam> getChildResults()
 
     @Override
     protected TreeNode<AnalysisResultSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> rowData) {
-//        if (SET_TREE_ARTIFACTS.contains(rowData.getTypeData().getArtifactType().getTypeID())) {
-//            return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getTypeData().getArtifactType(), dataSourceId, null));
-//        } else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getTypeData().getArtifactType())) {
-//            return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getTypeData().getArtifactType(), dataSourceId, null));
-//        } else {
-        return new AnalysisResultTypeTreeNode(rowData);
-//        }
+        if (SET_TREE_ARTIFACTS.contains(rowData.getTypeData().getArtifactType().getTypeID())) {
+            return new TreeTypeNode(rowData, new TreeSetFactory(rowData.getTypeData().getArtifactType(), dataSourceId, null));
+        } else if (BlackboardArtifact.Type.TSK_KEYWORD_HIT.equals(rowData.getTypeData().getArtifactType())) {
+            return new TreeTypeNode(rowData, new KeywordSetFactory(dataSourceId));
+        } else {
+            return new AnalysisResultTypeTreeNode(rowData);
+        }
     }
 
     @Override
@@ -175,191 +183,260 @@ public void respondSelection(DataResultTopComponent dataResultPanel) {
         }
     }
 
-//
-//    /**
-//     * An analysis result type node that has nested children.
-//     */
-//    static class TreeTypeNode extends TreeNode<AnalysisResultSearchParam> {
-//
-//        /**
-//         * Main constructor.
-//         *
-//         * @param itemData The data to display.
-//         */
-//        public TreeTypeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> itemData, ChildFactory childFactory) {
-//            super(itemData.getTypeData().getArtifactType().getTypeName(),
-//                    getIconPath(itemData.getTypeData().getArtifactType()),
-//                    itemData,
-//                    Children.create(childFactory, true),
-//                    getDefaultLookup(itemData));
-//        }
-//
-//        @Override
-//        public void respondSelection(DataResultTopComponent dataResultPanel) {
-//            // GVDTODO...NO OP???
-//        }
-//    }
-//
-//    /**
-//     * Factory displaying all hashset sets with count in the tree.
-//     */
-//    static class TreeSetFactory extends TreeChildFactory<AnalysisResultSetSearchParam> {
-//
-//        private final BlackboardArtifact.Type artifactType;
-//        private final Long dataSourceId;
-//        private final String nullSetName;
-//
-//        /**
-//         * Main constructor.
-//         *
-//         * @param artifactType The type of artifact.
-//         * @param dataSourceId The data source object id for which the results
-//         *                     should be filtered or null if no data source
-//         *                     filtering.
-//         * @param nullSetName  The name of the set for artifacts with no
-//         *                     TSK_SET_NAME value. If null, items are omitted.
-//         */
-//        public TreeSetFactory(BlackboardArtifact.Type artifactType, Long dataSourceId, String nullSetName) {
-//            this.artifactType = artifactType;
-//            this.dataSourceId = dataSourceId;
-//            this.nullSetName = nullSetName;
-//        }
-//
-//        @Override
-//        protected TreeResultsDTO<? extends AnalysisResultSetSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
-//            return MainDAO.getInstance().getAnalysisResultDAO().getSetCounts(this.artifactType, this.dataSourceId, this.nullSetName);
-//        }
-//
-//        @Override
-//        public boolean isRefreshRequired(PropertyChangeEvent evt) {
-//            return AnalysisResultTypeFactory.isRefreshRequired(artifactType, evt);
-//        }
-//
-//        @Override
-//        protected TreeNode<AnalysisResultSetSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> rowData) {
-//            return new TreeSetTypeNode(rowData, Children.LEAF);
-//        }
-//    }
-//
-//    /**
-//     * A node for a set within an artifact type.
-//     */
-//    static class TreeSetTypeNode extends TreeNode<AnalysisResultSetSearchParam> {
-//
-//        /**
-//         * Main constructor.
-//         *
-//         * @param artifactType The type of artifact.
-//         * @param itemData     The data to display.
-//         */
-//        public TreeSetTypeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> itemData, Children children) {
-//            super(itemData.getTypeData().getArtifactType().getTypeName(),
-//                    getIconPath(itemData.getTypeData().getArtifactType()),
-//                    itemData,
-//                    children,
-//                    getDefaultLookup(itemData));
-//        }
-//
-//        @Override
-//        public void respondSelection(DataResultTopComponent dataResultPanel) {
-//            dataResultPanel.displayAnalysisResultSet(this.getItemData().getTypeData());
-//        }
-//    }
-//    
-//    
-//    @Messages({
-//        "AnalysisResultTypeFactory_adHocName=Adhoc Results"
-//    })
-//    static class KeywordSetFactory extends TreeSetFactory {
-//        
-//        public KeywordSetFactory(Long dataSourceId) {
-//            super(BlackboardArtifact.Type.TSK_KEYWORD_HIT, dataSourceId, Bundle.AnalysisResultTypeFactory_adHocName());
-//        }
-//
-//        @Override
-//        protected TreeNode<AnalysisResultSetSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> rowData) {
-//            return new TreeSetTypeNode(rowData, Children.LEAF);
-//        }
-//        
-//        
-//        
-//    }
-//    
-//    public static class KeywordSearchTermParams {
-//        private final String setName;
-//        private final String searchTerm;
-//        private final boolean hasChildren;
-//        private final Long dataSourceId;
-//
-//        public KeywordSearchTermParams(String setName, String searchTerm, boolean hasChildren, Long dataSourceId) {
-//            this.setName = setName;
-//            this.searchTerm = searchTerm;
-//            this.hasChildren = hasChildren;
-//            this.dataSourceId = dataSourceId;
-//        }
-//        
-//        public String getSetName() {
-//            return setName;
-//        }
-//
-//        public String getSearchTerm() {
-//            return searchTerm;
-//        }
-//
-//        public boolean hasChildren() {
-//            return hasChildren;
-//        }
-//
-//        public Long getDataSourceId() {
-//            return dataSourceId;
-//        }
-//    }
-//    
-//    static class KeywordSearchTermFactory extends TreeChildFactory<KeywordSearchTermParams> {
-//        private final AnalysisResultSetSearchParam setParams;
-//
-//        public KeywordSearchTermFactory(AnalysisResultSetSearchParam setParams) {
-//            this.setParams = setParams;
-//        }
-//        
-//        
-//        @Override
-//        protected TreeNode<KeywordSearchTermParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends KeywordSearchTermParams> rowData) {
-//            return new KeywordSearchTermNode(rowData);
-//        }
-//
-//        @Override
-//        protected TreeResultsDTO<? extends KeywordSearchTermParams> getChildResults() throws IllegalArgumentException, ExecutionException {
-//            return MainDAO.getInstance().getAnalysisResultDAO().getKeywordSetCounts(this.setParams);
-//        }
-//
-//        @Override
-//        public boolean isRefreshRequired(PropertyChangeEvent evt) {
-//            return AnalysisResultTypeFactory.isRefreshRequired(BlackboardArtifact.Type.TSK_KEYWORD_HIT, evt);
-//        }
-//        
-//    }
-//    
-//    static class KeywordSearchTermNode extends TreeNode<KeywordSearchTermParams> {
-//
-//        public KeywordSearchTermNode(TreeResultsDTO.TreeItemDTO<? extends KeywordSearchTermParams> itemData) {
-//            super(itemData.getTypeData().getSearchTerm(), 
-//                    getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT), 
-//                    itemData, 
-//                    itemData.getTypeData().hasChildren() ? Children.create(new KeywordFoundMatchFactory(itemData), true) : Children.LEAF, 
-//                    getDefaultLookup(itemData));
-//        }
-//
-//        @Override
-//        public void respondSelection(DataResultTopComponent dataResultPanel) {
-//            KeywordSearchTermParams searchParams = this.getItemData().getTypeData();
-//            
-//            if (!searchParams.hasChildren()) {
-//                dataResultPanel.displayKeywordHits(new KeywordHitSearchParam(searchParams.getDataSourceId(), searchParams.getSetName(), null, searchParams.getSearchTerm()));
-//            }
-//        }
-//        
-//    }
-//    public static class KeywordFoundMatchFactory 
-//    public static class KeywordFoundMatchNode 
+    /**
+     * An analysis result type node that has nested children.
+     */
+    static class TreeTypeNode extends TreeNode<AnalysisResultSearchParam> {
+
+        /**
+         * Main constructor.
+         *
+         * @param itemData The data to display.
+         */
+        public TreeTypeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSearchParam> itemData, ChildFactory<?> childFactory) {
+            super(itemData.getTypeData().getArtifactType().getTypeName(),
+                    getIconPath(itemData.getTypeData().getArtifactType()),
+                    itemData,
+                    Children.create(childFactory, true),
+                    getDefaultLookup(itemData));
+        }
+    }
+
+    /**
+     * Factory displaying all hashset sets with count in the tree.
+     */
+    static class TreeSetFactory extends TreeChildFactory<AnalysisResultSetSearchParam> {
+
+        private final BlackboardArtifact.Type artifactType;
+        private final Long dataSourceId;
+        private final String nullSetName;
+
+        /**
+         * Main constructor.
+         *
+         * @param artifactType The type of artifact.
+         * @param dataSourceId The data source object id for which the results
+         *                     should be filtered or null if no data source
+         *                     filtering.
+         * @param nullSetName  The name of the set for artifacts with no
+         *                     TSK_SET_NAME value. If null, items are omitted.
+         */
+        public TreeSetFactory(BlackboardArtifact.Type artifactType, Long dataSourceId, String nullSetName) {
+            this.artifactType = artifactType;
+            this.dataSourceId = dataSourceId;
+            this.nullSetName = nullSetName;
+        }
+
+        @Override
+        protected TreeResultsDTO<? extends AnalysisResultSetSearchParam> getChildResults() throws IllegalArgumentException, ExecutionException {
+            return MainDAO.getInstance().getAnalysisResultDAO().getSetCounts(this.artifactType, this.dataSourceId, this.nullSetName);
+        }
+
+        @Override
+        public boolean isRefreshRequired(PropertyChangeEvent evt) {
+            return AnalysisResultTypeFactory.isRefreshRequired(artifactType, evt);
+        }
+
+        @Override
+        protected TreeNode<AnalysisResultSetSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> rowData) {
+            return new TreeSetTypeNode(rowData);
+        }
+    }
+
+    /**
+     * A node for a set within an artifact type.
+     */
+    static class TreeSetTypeNode extends TreeNode<AnalysisResultSetSearchParam> {
+
+        /**
+         * Main constructor.
+         *
+         * @param itemData     The data to display.
+         */
+        public TreeSetTypeNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> itemData) {
+            super(itemData.getTypeData().getArtifactType().getTypeName(),
+                    getIconPath(itemData.getTypeData().getArtifactType()),
+                    itemData,
+                    Children.LEAF,
+                    getDefaultLookup(itemData));
+        }
+
+        @Override
+        public void respondSelection(DataResultTopComponent dataResultPanel) {
+            dataResultPanel.displayAnalysisResultSet(this.getItemData().getTypeData());
+        }
+    }
+
+    /**
+     * A factory that shows all sets in keyword hits.
+     */
+    @Messages({
+        "AnalysisResultTypeFactory_adHocName=Adhoc Results"
+    })
+    static class KeywordSetFactory extends TreeSetFactory {
+
+        public KeywordSetFactory(Long dataSourceId) {
+            super(BlackboardArtifact.Type.TSK_KEYWORD_HIT, dataSourceId, Bundle.AnalysisResultTypeFactory_adHocName());
+        }
+
+        @Override
+        protected TreeNode<AnalysisResultSetSearchParam> createNewNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> rowData) {
+            return new KeywordSetNode(rowData);
+        }
+    }
+
+    static class KeywordSetNode extends TreeNode<AnalysisResultSetSearchParam> {
+
+        /**
+         * Main constructor.
+         *
+         * @param itemData     The data to display.
+         */
+        public KeywordSetNode(TreeResultsDTO.TreeItemDTO<? extends AnalysisResultSetSearchParam> itemData) {
+            super(itemData.getTypeData().getArtifactType().getTypeName(),
+                    getIconPath(itemData.getTypeData().getArtifactType()),
+                    itemData,
+                    Children.create(new KeywordSearchTermFactory(itemData.getTypeData()), true),
+                    getDefaultLookup(itemData));
+        }        
+    }
+
+    /**
+     * Factory for displaying all search terms (regex or exact) for a specific
+     * set.
+     */
+    static class KeywordSearchTermFactory extends TreeChildFactory<KeywordSearchTermParams> {
+
+        private final AnalysisResultSetSearchParam setParams;
+
+        /**
+         * Main constructor.
+         *
+         * @param setParams The parameters for the set.
+         */
+        public KeywordSearchTermFactory(AnalysisResultSetSearchParam setParams) {
+            this.setParams = setParams;
+        }
+
+        @Override
+        protected TreeNode<KeywordSearchTermParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends KeywordSearchTermParams> rowData) {
+            return new KeywordSearchTermNode(rowData);
+        }
+
+        @Override
+        protected TreeResultsDTO<? extends KeywordSearchTermParams> getChildResults() throws IllegalArgumentException, ExecutionException {
+            return MainDAO.getInstance().getAnalysisResultDAO().getKeywordSearchTermCounts(this.setParams.getSetName(), this.setParams.getDataSourceId());
+        }
+
+        @Override
+        public boolean isRefreshRequired(PropertyChangeEvent evt) {
+            return AnalysisResultTypeFactory.isRefreshRequired(BlackboardArtifact.Type.TSK_KEYWORD_HIT, evt);
+        }
+
+    }
+
+    /**
+     * A node for an individual search term.
+     */
+    static class KeywordSearchTermNode extends TreeNode<KeywordSearchTermParams> {
+
+        /**
+         * Main constructor.
+         *
+         * @param itemData The data for the search term.
+         */
+        public KeywordSearchTermNode(TreeResultsDTO.TreeItemDTO<? extends KeywordSearchTermParams> itemData) {
+            super(itemData.getTypeData().getSearchTerm(),
+                    getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT),
+                    itemData,
+                    itemData.getTypeData().hasChildren() ? Children.create(new KeywordFoundMatchFactory(itemData.getTypeData()), true) : Children.LEAF,
+                    getDefaultLookup(itemData));
+        }
+
+        @Override
+        public void respondSelection(DataResultTopComponent dataResultPanel) {
+            KeywordSearchTermParams searchParams = this.getItemData().getTypeData();
+
+            if (!searchParams.hasChildren()) {
+                dataResultPanel.displayKeywordHits(
+                        new KeywordHitSearchParam(
+                                searchParams.getDataSourceId(),
+                                searchParams.getSetName(),
+                                null,
+                                searchParams.getSearchTerm()));
+            } else {
+                super.respondSelection(dataResultPanel);
+            }
+        }
+
+    }
+
+    /**
+     * A factory for found keyword matches based on the search term (for
+     * regex/substring).
+     */
+    public static class KeywordFoundMatchFactory extends TreeChildFactory<KeywordMatchParams> {
+
+        private final KeywordSearchTermParams setParams;
+
+        /**
+         * Main constructor.
+         *
+         * @param params The search term parameters.
+         */
+        public KeywordFoundMatchFactory(KeywordSearchTermParams params) {
+            this.setParams = params;
+        }
+
+        @Override
+        protected TreeNode<KeywordMatchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends KeywordMatchParams> rowData) {
+            return new KeywordFoundMatchNode(rowData);
+        }
+
+        @Override
+        protected TreeResultsDTO<? extends KeywordMatchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
+            return MainDAO.getInstance().getAnalysisResultDAO().getKeywordMatchCounts(
+                    this.setParams.getSetName(),
+                    this.setParams.getSearchTerm(),
+                    this.setParams.getSearchType(),
+                    this.setParams.getDataSourceId());
+        }
+
+        @Override
+        public boolean isRefreshRequired(PropertyChangeEvent evt) {
+            return AnalysisResultTypeFactory.isRefreshRequired(BlackboardArtifact.Type.TSK_KEYWORD_HIT, evt);
+        }
+    }
+
+    /**
+     * A node signifying a match for a specific keyword given a regex/substring
+     * search term.
+     */
+    static class KeywordFoundMatchNode extends TreeNode<KeywordMatchParams> {
+
+        /**
+         * Main constructor.
+         *
+         * @param itemData The data for the match parameters.
+         */
+        public KeywordFoundMatchNode(TreeResultsDTO.TreeItemDTO<? extends KeywordMatchParams> itemData) {
+            super(itemData.getTypeData().getKeywordMatch(),
+                    getIconPath(BlackboardArtifact.Type.TSK_KEYWORD_HIT),
+                    itemData,
+                    Children.LEAF,
+                    getDefaultLookup(itemData));
+        }
+
+        @Override
+        public void respondSelection(DataResultTopComponent dataResultPanel) {
+            KeywordMatchParams searchParams = this.getItemData().getTypeData();
+            dataResultPanel.displayKeywordHits(new KeywordHitSearchParam(
+                    searchParams.getDataSourceId(),
+                    searchParams.getSetName(),
+                    searchParams.getKeywordMatch(),
+                    searchParams.getSearchTerm()));
+        }
+
+    }
+
 }
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED
index 08b42899657bd4d7c51c59bbe974636fe8e19152..64865cab30326c6aec6b9c7a66f3e24d398ee282 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED
+++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/Bundle.properties-MERGED
@@ -1,2 +1,8 @@
+AnalysisResultTypeFactory_adHocName=Adhoc Results
 ImageNode_ExtractUnallocAction_text=Extract Unallocated Space to Single Files
+SearchResultRootNode_createSheet_childCount_displayName=Child Count
+SearchResultRootNode_createSheet_childCount_name=Child Count
+SearchResultRootNode_createSheet_type_displayName=Name
+SearchResultRootNode_createSheet_type_name=Name
+SearchResultRootNode_noDesc=No Description
 VolumnNode_ExtractUnallocAction_text=Extract Unallocated Space to Single Files
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchResultRootNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchResultRootNode.java
index b683dd0105c35d4672fd5be356e1e7d181a4c473..0fad099c01797dda853f8a0f41354025d747c7fd 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchResultRootNode.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/SearchResultRootNode.java
@@ -22,6 +22,7 @@
 import org.openide.nodes.Children;
 import org.openide.nodes.Sheet;
 import org.openide.util.NbBundle;
+import org.openide.util.NbBundle.Messages;
 import org.sleuthkit.autopsy.datamodel.NodeProperty;
 import org.sleuthkit.autopsy.mainui.datamodel.SearchResultsDTO;
 
@@ -45,6 +46,13 @@ private SearchResultRootNode(SearchResultsDTO initialResults, SearchResultChildF
         setDisplayName(initialResults.getDisplayName());
     }
 
+    @Messages({
+        "SearchResultRootNode_noDesc=No Description",
+        "SearchResultRootNode_createSheet_type_name=Name",
+        "SearchResultRootNode_createSheet_type_displayName=Name",
+        "SearchResultRootNode_createSheet_childCount_name=Child Count",
+        "SearchResultRootNode_createSheet_childCount_displayName=Child Count"
+    })
     @Override
     protected Sheet createSheet() {
         Sheet sheet = super.createSheet();
@@ -54,14 +62,16 @@ protected Sheet createSheet() {
             sheet.put(sheetSet);
         }
 
-        sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.name"),
-                NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.displayName"),
-                NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.artType.desc"),
+        sheetSet.put(new NodeProperty<>(
+                Bundle.SearchResultRootNode_createSheet_type_name(),
+                Bundle.SearchResultRootNode_createSheet_type_displayName(),
+                Bundle.SearchResultRootNode_noDesc(),
                 getDisplayName()));
 
-        sheetSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.name"),
-                NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.displayName"),
-                NbBundle.getMessage(this.getClass(), "ArtifactTypeNode.createSheet.childCnt.desc"),
+        sheetSet.put(new NodeProperty<>(
+                Bundle.SearchResultRootNode_createSheet_childCount_name(),
+                Bundle.SearchResultRootNode_createSheet_childCount_displayName(),
+                Bundle.SearchResultRootNode_noDesc(),
                 this.factory.getResultCount()));
 
         return sheet;
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java
index 848501d92973d42f80d688bc8c7b0f6d07e425d0..7229825d89880e04b9541abbfe20ad2348ff9579 100644
--- a/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java
+++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/TreeNode.java
@@ -24,6 +24,7 @@
 import org.openide.nodes.Children;
 import org.openide.util.Lookup;
 import org.openide.util.lookup.Lookups;
+import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO.TreeItemDTO;
 
@@ -129,4 +130,11 @@ public void update(TreeItemDTO<? extends T> updatedData) {
         this.itemData = updatedData;
         updateDisplayName(prevData, updatedData);
     }
+
+    @Override
+    public void respondSelection(DataResultTopComponent dataResultPanel) {
+        dataResultPanel.setNode(this);
+    }
+    
+    
 }
diff --git a/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java
new file mode 100644
index 0000000000000000000000000000000000000000..a608827b2464fa7a75e19ed9e0c47411a827334b
--- /dev/null
+++ b/Core/src/org/sleuthkit/autopsy/mainui/nodes/ViewsTypeFactory.java
@@ -0,0 +1,350 @@
+/*
+ * Autopsy Forensic Browser
+ *
+ * Copyright 2021 Basis Technology Corp.
+ * Contact: carrier <at> sleuthkit <dot> org
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.sleuthkit.autopsy.mainui.nodes;
+
+import java.beans.PropertyChangeEvent;
+import java.util.Collection;
+import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+import org.openide.nodes.Children;
+import org.sleuthkit.autopsy.corecomponents.DataResultTopComponent;
+import org.sleuthkit.autopsy.ingest.ModuleContentEvent;
+import org.sleuthkit.autopsy.mainui.datamodel.FileExtDocumentFilter;
+import org.sleuthkit.autopsy.mainui.datamodel.FileExtExecutableFilter;
+import org.sleuthkit.autopsy.mainui.datamodel.FileExtRootFilter;
+import org.sleuthkit.autopsy.mainui.datamodel.FileExtSearchFilter;
+import org.sleuthkit.autopsy.mainui.datamodel.FileTypeExtensionsSearchParams;
+import org.sleuthkit.autopsy.mainui.datamodel.FileTypeMimeSearchParams;
+import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams;
+import org.sleuthkit.autopsy.mainui.datamodel.FileTypeSizeSearchParams.FileSizeFilter;
+import org.sleuthkit.autopsy.mainui.datamodel.MainDAO;
+import org.sleuthkit.autopsy.mainui.datamodel.TreeResultsDTO;
+import org.sleuthkit.datamodel.AbstractFile;
+
+/**
+ *
+ * Factories for displaying views.
+ */
+public class ViewsTypeFactory {
+
+    /**
+     * Returns an AbstractFile if the event contains a ModuleContentEvent which
+     * contains an abstract file and that file belongs to the data source if a
+     * data source id is specified. Otherwise, returns null.
+     *
+     * @param evt          The event
+     * @param dataSourceId The data source object id that will be the parent of
+     *                     the file or null.
+     *
+     * @return The file meeting criteria or null.
+     */
+    private static AbstractFile getFileInDataSourceFromEvt(PropertyChangeEvent evt, Long dataSourceId) {
+        if (!(evt.getOldValue() instanceof ModuleContentEvent)) {
+            return null;
+        }
+
+        ModuleContentEvent contentEvt = (ModuleContentEvent) evt.getOldValue();
+        if (!(contentEvt.getSource() instanceof AbstractFile)) {
+            return null;
+        }
+
+        AbstractFile file = (AbstractFile) contentEvt.getSource();
+        if (dataSourceId != null && file.getDataSourceObjectId() != dataSourceId) {
+            return null;
+        }
+
+        return file;
+    }
+
+    /**
+     * The factory for creating file size tree nodes.
+     */
+    public static class FileSizeTypeFactory extends TreeChildFactory<FileTypeSizeSearchParams> {
+
+        private final Long dataSourceId;
+
+        /**
+         * Main constructor.
+         *
+         * @param dataSourceId The data source to filter files to or null.
+         */
+        public FileSizeTypeFactory(Long dataSourceId) {
+            this.dataSourceId = dataSourceId;
+        }
+
+        @Override
+        protected TreeNode<FileTypeSizeSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeSizeSearchParams> rowData) {
+            return new FileSizeTypeNode(rowData);
+        }
+
+        @Override
+        protected TreeResultsDTO<? extends FileTypeSizeSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
+            return MainDAO.getInstance().getViewsDAO().getFileSizeCounts(this.dataSourceId);
+        }
+
+        @Override
+        public boolean isRefreshRequired(PropertyChangeEvent evt) {
+            AbstractFile evtFile = getFileInDataSourceFromEvt(evt, this.dataSourceId);
+            if (evtFile == null) {
+                return false;
+            }
+
+            long size = evtFile.getSize();
+            for (FileSizeFilter filter : FileSizeFilter.values()) {
+                if (size >= filter.getMinBound() && (filter.getMaxBound() == null || size < filter.getMaxBound())) {
+                    return true;
+                }
+            }
+
+            return false;
+        }
+
+        /**
+         * Shows a file size tree node.
+         */
+        static class FileSizeTypeNode extends TreeNode<FileTypeSizeSearchParams> {
+
+            /**
+             * Main constructor.
+             *
+             * @param itemData The data for the node.
+             */
+            FileSizeTypeNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeSizeSearchParams> itemData) {
+                super("FILE_SIZE_" + itemData.getTypeData().getSizeFilter().getName(), "org/sleuthkit/autopsy/images/file-size-16.png", itemData);
+            }
+
+            @Override
+            public void respondSelection(DataResultTopComponent dataResultPanel) {
+                dataResultPanel.displayFileSizes(this.getItemData().getTypeData());
+            }
+
+        }
+    }
+
+    /**
+     * Factory to display mime type prefix tree nodes (i.e. audio, multipart).
+     */
+    public static class FileMimePrefixFactory extends TreeChildFactory<FileTypeMimeSearchParams> {
+
+        private final Long dataSourceId;
+
+        /**
+         * Main constructor.
+         *
+         * @param dataSourceId The data source to filter files to or null.
+         */
+        public FileMimePrefixFactory(Long dataSourceId) {
+            this.dataSourceId = dataSourceId;
+        }
+
+        @Override
+        protected TreeNode<FileTypeMimeSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeMimeSearchParams> rowData) {
+            return new FileMimePrefixNode(rowData);
+        }
+
+        @Override
+        protected TreeResultsDTO<? extends FileTypeMimeSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
+            return MainDAO.getInstance().getViewsDAO().getFileMimeCounts(null, this.dataSourceId);
+        }
+
+        @Override
+        public boolean isRefreshRequired(PropertyChangeEvent evt) {
+            return getFileInDataSourceFromEvt(evt, this.dataSourceId) != null;
+        }
+
+        static class FileMimePrefixNode extends TreeNode<FileTypeMimeSearchParams> {
+
+            /**
+             * Main constructor.
+             *
+             * @param itemData The data for the node.
+             */
+            public FileMimePrefixNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeMimeSearchParams> itemData) {
+                super(
+                        "FILE_MIME_" + itemData.getTypeData().getMimeType(),
+                        "org/sleuthkit/autopsy/images/file_types.png",
+                        itemData,
+                        Children.create(new FileMimeSuffixFactory(itemData.getTypeData().getDataSourceId(), itemData.getTypeData().getMimeType()), true),
+                        getDefaultLookup(itemData));
+            }
+        }
+    }
+
+    /**
+     * Displays mime type suffixes of a prefix (i.e. for prefix 'audio', a
+     * suffix could be 'aac').
+     */
+    public static class FileMimeSuffixFactory extends TreeChildFactory<FileTypeMimeSearchParams> {
+
+        private final String mimeTypePrefix;
+        private final Long dataSourceId;
+
+        /**
+         * Main constructor.
+         *
+         * @param dataSourceId   The data source to filter files to or null.
+         * @param mimeTypePrefix The mime type prefix (i.e. 'audio',
+         *                       'multipart').
+         */
+        private FileMimeSuffixFactory(Long dataSourceId, String mimeTypePrefix) {
+            this.dataSourceId = dataSourceId;
+            this.mimeTypePrefix = mimeTypePrefix;
+        }
+
+        @Override
+        protected TreeNode<FileTypeMimeSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeMimeSearchParams> rowData) {
+            return new FileMimeSuffixNode(rowData);
+        }
+
+        @Override
+        protected TreeResultsDTO<? extends FileTypeMimeSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
+            return MainDAO.getInstance().getViewsDAO().getFileMimeCounts(this.mimeTypePrefix, this.dataSourceId);
+        }
+
+        @Override
+        public boolean isRefreshRequired(PropertyChangeEvent evt) {
+            AbstractFile file = getFileInDataSourceFromEvt(evt, dataSourceId);
+            if (file == null || file.getMIMEType() == null) {
+                return false;
+            }
+
+            return file.getMIMEType().toLowerCase().startsWith(this.mimeTypePrefix.toLowerCase());
+        }
+
+        /**
+         * Displays an individual suffix node in the tree (i.e. 'aac' underneath
+         * 'audio').
+         */
+        static class FileMimeSuffixNode extends TreeNode<FileTypeMimeSearchParams> {
+
+            /**
+             * Main constructor.
+             *
+             * @param itemData The data for the node.
+             */
+            public FileMimeSuffixNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeMimeSearchParams> itemData) {
+                super("FILE_MIME_" + itemData.getTypeData().getMimeType(),
+                        "org/sleuthkit/autopsy/images/file-filter-icon.png",
+                        itemData);
+            }
+
+            @Override
+            public void respondSelection(DataResultTopComponent dataResultPanel) {
+                dataResultPanel.displayFileMimes(this.getItemData().getTypeData());
+            }
+
+        }
+    }
+
+    /**
+     * Displays file extension tree nodes with possibly nested tree nodes (for
+     * documents and executables).
+     */
+    public static class FileExtFactory extends TreeChildFactory<FileTypeExtensionsSearchParams> {
+
+        private final Long dataSourceId;
+        private final Collection<FileExtSearchFilter> childFilters;
+
+        /**
+         * Main constructor using root filters.
+         *
+         * @param dataSourceId The data source to filter files to or null.
+         */
+        public FileExtFactory(Long dataSourceId) {
+            this(dataSourceId, Stream.of(FileExtRootFilter.values()).collect(Collectors.toList()));
+        }
+
+        /**
+         * Main constructor.
+         *
+         * @param dataSourceId The data source to filter files to or null.
+         * @param childFilters The file extension filters that will each be a
+         *                     child tree node of this factory.
+         */
+        private FileExtFactory(Long dataSourceId, Collection<FileExtSearchFilter> childFilters) {
+            this.childFilters = childFilters;
+            this.dataSourceId = dataSourceId;
+        }
+
+        @Override
+        protected TreeNode<FileTypeExtensionsSearchParams> createNewNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeExtensionsSearchParams> rowData) {
+            Collection<FileExtSearchFilter> childFilters;
+            if (rowData.getTypeData().getFilter() == FileExtRootFilter.TSK_DOCUMENT_FILTER) {
+                childFilters = Stream.of(FileExtDocumentFilter.values()).collect(Collectors.toList());
+            } else if (rowData.getTypeData().getFilter() == FileExtRootFilter.TSK_EXECUTABLE_FILTER) {
+                childFilters = Stream.of(FileExtExecutableFilter.values()).collect(Collectors.toList());
+            } else {
+                childFilters = null;
+            }
+
+            return new FileExtNode(rowData, childFilters);
+        }
+
+        @Override
+        protected TreeResultsDTO<? extends FileTypeExtensionsSearchParams> getChildResults() throws IllegalArgumentException, ExecutionException {
+            return MainDAO.getInstance().getViewsDAO().getFileExtCounts(this.childFilters, this.dataSourceId);
+        }
+
+        @Override
+        public boolean isRefreshRequired(PropertyChangeEvent evt) {
+            AbstractFile file = getFileInDataSourceFromEvt(evt, this.dataSourceId);
+            return file != null && this.childFilters.stream()
+                    .anyMatch((filter) -> MainDAO.getInstance().getViewsDAO().isFilesByExtInvalidating(
+                    new FileTypeExtensionsSearchParams(filter, this.dataSourceId), file));
+        }
+
+        /**
+         * Represents a file extension tree node that may or may not have child
+         * filters.
+         */
+        static class FileExtNode extends TreeNode<FileTypeExtensionsSearchParams> {
+
+            private final Collection<FileExtSearchFilter> childFilters;
+
+            /**
+             * Main constructor.
+             *
+             * @param itemData     The data for the node.
+             * @param childFilters The file filters that will be used to make
+             *                     children of this node.
+             */
+            public FileExtNode(TreeResultsDTO.TreeItemDTO<? extends FileTypeExtensionsSearchParams> itemData, Collection<FileExtSearchFilter> childFilters) {
+                super("FILE_EXT_" + itemData.getTypeData().getFilter().getName(),
+                        childFilters == null ? "org/sleuthkit/autopsy/images/file-filter-icon.png" : "org/sleuthkit/autopsy/images/file_types.png",
+                        itemData,
+                        childFilters == null ? Children.LEAF : Children.create(new FileExtFactory(itemData.getTypeData().getDataSourceId(), childFilters), true),
+                        getDefaultLookup(itemData));
+
+                this.childFilters = childFilters;
+            }
+
+            @Override
+            public void respondSelection(DataResultTopComponent dataResultPanel) {
+                if (childFilters == null) {
+                    dataResultPanel.displayFileExtensions(this.getItemData().getTypeData());
+                } else {
+                    super.respondSelection(dataResultPanel);
+                }
+            }
+
+        }
+    }
+
+}
diff --git a/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java b/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java
index 992dcb91584197c93a16e51fc8e5134e483b3adb..a96aac8bfbd2f18edfe890d07a7d53c76c12982c 100644
--- a/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java
+++ b/Core/test/qa-functional/src/org/sleuthkit/autopsy/mainui/datamodel/TableSearchTest.java
@@ -24,6 +24,7 @@
 import java.util.HashSet;
 import java.util.concurrent.ExecutionException;
 import java.util.List;
+import java.util.Optional;
 import java.util.Set;
 import java.util.stream.Collectors;
 import junit.framework.Assert;
@@ -37,7 +38,6 @@
 import org.sleuthkit.autopsy.testutils.TestUtilsException;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.AnalysisResult;
-import org.sleuthkit.datamodel.Attribute;
 import org.sleuthkit.datamodel.Blackboard;
 import org.sleuthkit.datamodel.Blackboard.BlackboardException;
 import org.sleuthkit.datamodel.BlackboardArtifact;
@@ -47,9 +47,14 @@
 import org.sleuthkit.datamodel.DataSource;
 import org.sleuthkit.datamodel.FileSystem;
 import org.sleuthkit.datamodel.Host;
+import org.sleuthkit.datamodel.HostManager;
 import org.sleuthkit.datamodel.Person;
 import org.sleuthkit.datamodel.Pool;
 import org.sleuthkit.datamodel.Image;
+import org.sleuthkit.datamodel.OsAccount;
+import org.sleuthkit.datamodel.OsAccountInstance;
+import org.sleuthkit.datamodel.OsAccountManager;
+import org.sleuthkit.datamodel.OsAccountRealm;
 import org.sleuthkit.datamodel.Score;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TagName;
@@ -119,6 +124,9 @@ public class TableSearchTest extends NbTestCase {
     private static final String PERSON_HOST_NAME1 = "Host for Person A";
     private static final String PERSON_HOST_NAME2 = "Host for Person B";
     
+    // OS Accounts test
+    private static final String REALM_NAME_COLUMN = "Realm Name";
+    private static final String HOST_COLUMN = "Host";    
     
     /////////////////////////////////////////////////
     // Data to be used across the test methods.
@@ -128,6 +136,7 @@ public class TableSearchTest extends NbTestCase {
     SleuthkitCase db = null;       // The case database
     Blackboard blackboard = null;  // The blackboard
     TagsManager tagsManager = null;// Tags manager
+    OsAccountManager accountMgr = null;
 
     DataSource dataSource1 = null; // A local files data source
     DataSource dataSource2 = null; // A local files data source
@@ -172,6 +181,9 @@ public class TableSearchTest extends NbTestCase {
     // Tags test
     TagName knownTag1 = null;
     TagName tag2 = null;
+    
+    // OS Accounts test
+    OsAccount osAccount1 = null;
 
     public static Test suite() {
         NbModuleSuite.Configuration conf = NbModuleSuite.createConfiguration(TableSearchTest.class).
@@ -199,6 +211,7 @@ public void testTableSearches() {
         sizeSearchTest();
         fileSystemTest();
         tagsTest();
+        OsAccountsTest();
     }
 
     /**
@@ -212,6 +225,7 @@ private void setUpCaseDatabase() {
             db = openCase.getSleuthkitCase();
             blackboard = db.getBlackboard();
             tagsManager = openCase.getServices().getTagsManager();
+            accountMgr = openCase.getSleuthkitCase().getOsAccountManager();
 
             // Add two logical files data sources
             trans = db.beginTransaction();
@@ -467,8 +481,20 @@ private void setUpCaseDatabase() {
             
             // Tag the custom file in data source 2
             openCase.getServices().getTagsManager().addContentTag(customFile, knownTag1);
-
-        } catch (TestUtilsException | TskCoreException | BlackboardException | TagsManager.TagNameAlreadyExistsException ex) {
+            
+            // Add OS Accounts ---------------------            
+            HostManager hostMgr = openCase.getSleuthkitCase().getHostManager();                        
+            Host host1 = hostMgr.getHostByDataSource(dataSource1);            
+            OsAccount osAccount2 = accountMgr.newWindowsOsAccount("S-1-5-21-647283-46237-200", null, null, host1, OsAccountRealm.RealmScope.LOCAL);
+            accountMgr.newOsAccountInstance(osAccount2, dataSource1, OsAccountInstance.OsAccountInstanceType.ACCESSED);
+            OsAccount osAccount3 = accountMgr.newWindowsOsAccount("S-1-5-21-647283-46237-300", null, null, host1, OsAccountRealm.RealmScope.UNKNOWN);
+            accountMgr.newOsAccountInstance(osAccount3, dataSource1, OsAccountInstance.OsAccountInstanceType.REFERENCED);
+            
+            Host host2 = hostMgr.getHostByDataSource(dataSource2);
+            osAccount1 = accountMgr.newWindowsOsAccount("S-1-5-21-647283-46237-100", null, null, host2, OsAccountRealm.RealmScope.DOMAIN);
+            accountMgr.newOsAccountInstance(osAccount1, dataSource2, OsAccountInstance.OsAccountInstanceType.LAUNCHED);
+            
+        } catch (TestUtilsException | TskCoreException | BlackboardException | TagsManager.TagNameAlreadyExistsException | OsAccountManager.NotUserSIDException ex) {
             if (trans != null) {
                 try {
                     trans.rollback();
@@ -759,7 +785,52 @@ public void tagsTest() {
             Exceptions.printStackTrace(ex);
             Assert.fail(ex.getMessage());
         }
-    }    
+    }
+    
+    public void OsAccountsTest() {
+        // Quick test that everything is initialized
+        assertTrue(db != null);
+
+        try {
+            OsAccountsDAO accountsDAO = MainDAO.getInstance().getOsAccountsDAO();
+
+            // Get OS Accounts from data source 1
+            OsAccountsSearchParams param = new OsAccountsSearchParams(dataSource1.getId());
+            SearchResultsDTO results = accountsDAO.getAccounts(param, 0, null, false);
+            assertEquals(2, results.getTotalResultsCount());
+            assertEquals(2, results.getItems().size());
+
+            // Get OS Accounts from all data sources
+            param = new OsAccountsSearchParams(null);
+            results = accountsDAO.getAccounts(param, 0, null, false);
+            assertEquals(3, results.getTotalResultsCount());
+            assertEquals(3, results.getItems().size());
+            
+            // Get OS Accounts from data source 2
+            param = new OsAccountsSearchParams(dataSource2.getId());
+            results = accountsDAO.getAccounts(param, 0, null, false);
+            assertEquals(1, results.getTotalResultsCount());
+            assertEquals(1, results.getItems().size());
+            
+            // Get the row
+            RowDTO rowDTO = results.getItems().get(0);
+            assertTrue(rowDTO instanceof BaseRowDTO);
+            BaseRowDTO osAccountRowDTO = (BaseRowDTO) rowDTO;
+
+            // Check that the result is for the custom OS Account
+            Optional<String> addr = osAccount1.getAddr();
+            assertTrue(osAccountRowDTO.getCellValues().contains(addr.get()));            
+            
+            // Check that a few of the expected OS Account column names are present
+            List<String> columnDisplayNames = results.getColumns().stream().map(p -> p.getDisplayName()).collect(Collectors.toList());
+            assertTrue(columnDisplayNames.contains(REALM_NAME_COLUMN));
+            assertTrue(columnDisplayNames.contains(HOST_COLUMN));
+
+        } catch (ExecutionException ex) {
+            Exceptions.printStackTrace(ex);
+            Assert.fail(ex.getMessage());
+        }
+    }     
     
     public void analysisResultSearchTest() {
         // Quick test that everything is initialized
@@ -859,13 +930,13 @@ private void hashHitSearchTest() {
             // Test hash set hits
             AnalysisResultDAO analysisResultDAO = MainDAO.getInstance().getAnalysisResultDAO();
             HashHitSearchParam hashParam = new HashHitSearchParam(null, HASH_SET_1);
-            AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getHashHitsForTable(hashParam, 0, null, false);
+            AnalysisResultTableSearchResultsDTO results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null, false);
             assertEquals(BlackboardArtifact.Type.TSK_HASHSET_HIT, results.getArtifactType());
             assertEquals(3, results.getTotalResultsCount());
             assertEquals(3, results.getItems().size());
             
             hashParam = new HashHitSearchParam(dataSource2.getId(), HASH_SET_1);
-            results = analysisResultDAO.getHashHitsForTable(hashParam, 0, null, false);
+            results = analysisResultDAO.getAnalysisResultSetHits(hashParam, 0, null, false);
             assertEquals(BlackboardArtifact.Type.TSK_HASHSET_HIT, results.getArtifactType());
             assertEquals(1, results.getTotalResultsCount());
             assertEquals(1, results.getItems().size());
@@ -1131,5 +1202,6 @@ public void tearDown() {
         db = null;
         blackboard = null;
         tagsManager = null;
+        accountMgr = null;
     }
 }
diff --git a/CoreLibs/ivy.xml b/CoreLibs/ivy.xml
index 563d49c7f8e8cbc75c038a5d8e54d1844ba36b1d..4c9bd7c95d61ade41d2d2ce0e2603f695569d931 100644
--- a/CoreLibs/ivy.xml
+++ b/CoreLibs/ivy.xml
@@ -14,7 +14,7 @@
         
         <!-- for viewers -->
         <dependency conf="autopsy_core->*" org="org.freedesktop.gstreamer" name="gst1-java-core" rev="1.0.0"/>
-        <dependency conf="autopsy_core->*" org="net.java.dev.jna" name="jna-platform" rev="5.9.0"/>
+        <dependency conf="autopsy_core->*" org="net.java.dev.jna" name="jna-platform" rev="5.10.0"/>
         
         <!-- for file search -->
         <dependency conf="autopsy_core->*" org="com.github.lgooddatepicker" name="LGoodDatePicker" rev="10.3.1"/>
diff --git a/CoreLibs/nbproject/project.properties b/CoreLibs/nbproject/project.properties
index e9a54d6d1e9503748e86172a86b0d834a25f58ee..eb387920329b0ed030f32a48749f44e6446dbe1d 100644
--- a/CoreLibs/nbproject/project.properties
+++ b/CoreLibs/nbproject/project.properties
@@ -42,8 +42,8 @@ file.reference.javassist-3.12.1.GA.jar=release/modules/ext/javassist-3.12.1.GA.j
 file.reference.jfxtras-common-8.0-r4.jar=release/modules/ext/jfxtras-common-8.0-r4.jar
 file.reference.jfxtras-controls-8.0-r4.jar=release/modules/ext/jfxtras-controls-8.0-r4.jar
 file.reference.jfxtras-fxml-8.0-r4.jar=release/modules/ext/jfxtras-fxml-8.0-r4.jar
-file.reference.jna-5.9.0.jar=release/modules/ext/jna-5.9.0.jar
-file.reference.jna-platform-5.9.0.jar=release/modules/ext/jna-platform-5.9.0.jar
+file.reference.jna-5.10.0.jar=release/modules/ext/jna-5.10.0.jar
+file.reference.jna-platform-5.10.0.jar=release/modules/ext/jna-platform-5.10.0.jar
 file.reference.joda-time-2.4.jar=release/modules/ext/joda-time-2.4.jar
 file.reference.jsr305-1.3.9.jar=release/modules/ext/jsr305-1.3.9.jar
 file.reference.LGoodDatePicker-10.3.1.jar=release/modules/ext/LGoodDatePicker-10.3.1.jar
diff --git a/CoreLibs/nbproject/project.xml b/CoreLibs/nbproject/project.xml
index b26152055f72b74efc0bfb298bcecf96f4615287..8bc464da72f7f78aeeb906577d70fc12b60ce7c0 100644
--- a/CoreLibs/nbproject/project.xml
+++ b/CoreLibs/nbproject/project.xml
@@ -923,8 +923,8 @@
                 <binary-origin>release/modules/ext/commons-compress-1.18.jar</binary-origin>
             </class-path-extension>
             <class-path-extension>
-                <runtime-relative-path>ext/jna-platform-5.9.0.jar</runtime-relative-path>
-                <binary-origin>release\modules\ext\jna-platform-5.9.0.jar</binary-origin>
+                <runtime-relative-path>ext/jna-platform-5.10.0.jar</runtime-relative-path>
+                <binary-origin>release\modules\ext\jna-platform-5.10.0.jar</binary-origin>
             </class-path-extension>
             <class-path-extension>
                 <runtime-relative-path>ext/opencv-248.jar</runtime-relative-path>
@@ -951,8 +951,8 @@
                 <binary-origin>release/modules/ext/imageio-bmp-3.2.jar</binary-origin>
             </class-path-extension>
             <class-path-extension>
-                <runtime-relative-path>ext/jna-5.9.0.jar</runtime-relative-path>
-                <binary-origin>release\modules\ext\jna-5.9.0.jar</binary-origin>
+                <runtime-relative-path>ext/jna-5.10.0.jar</runtime-relative-path>
+                <binary-origin>release\modules\ext\jna-5.10.0.jar</binary-origin>
             </class-path-extension>
             <class-path-extension>
                 <runtime-relative-path>ext/commons-lang-2.6.jar</runtime-relative-path>