diff --git a/.gitignore b/.gitignore
index 8d3e3e44c57f2420ad75e0d5f14cb7a050d8000e..b89812646fed9df2e3607649e978a5ce6b05f605 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,8 +34,6 @@
 /Ingest/nbproject/*
 !/Ingest/nbproject/project.xml
 !/Ingest/nbproject/project.properties
-/branding_spear
-/installer_spear
 */genfiles.properties
 genfiles.properties
 /branding/core/core.jar/org/netbeans/core/startup/Bundle.properties
@@ -55,9 +53,14 @@ genfiles.properties
 /test/script/ScriptLog.txt
 /test/script/__pycache__/
 /test/script/*.pyc
+/test/script/DBDump-Diff.txt
+/test/script/DBDump.txt
+/test/script/SortedData-Diff.txt
+/test/script/SortedData.txt
 /test/build/
 /test/dist/
 /test/nbproject/*
+
 !/Testing/nbproject/project.xml
 !/Testing/nbproject/project.properties
 *~
diff --git a/Core/src/org/sleuthkit/autopsy/modules/android/CallLogAnalyzer.java b/Core/src/org/sleuthkit/autopsy/modules/android/CallLogAnalyzer.java
index 481d3aa23cbb8fa13c8f820c42e85d12be978127..d85755bce5767daca35d3fa2c62f68076383a843 100755
--- a/Core/src/org/sleuthkit/autopsy/modules/android/CallLogAnalyzer.java
+++ b/Core/src/org/sleuthkit/autopsy/modules/android/CallLogAnalyzer.java
@@ -19,114 +19,126 @@ package org.sleuthkit.autopsy.modules.android;
 
 import java.io.File;
+import java.io.IOException;
 import java.sql.Connection;
 import java.sql.DriverManager;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.util.List;
+import java.util.Arrays;
 import java.util.logging.Level;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.sleuthkit.autopsy.casemodule.Case;
 import org.sleuthkit.autopsy.coreutils.Logger;
 import org.sleuthkit.autopsy.datamodel.ContentUtils;
 import org.sleuthkit.datamodel.AbstractFile;
 import org.sleuthkit.datamodel.BlackboardArtifact;
 import org.sleuthkit.datamodel.BlackboardAttribute;
+import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
 import org.sleuthkit.datamodel.SleuthkitCase;
 import org.sleuthkit.datamodel.TskCoreException;
 
 class CallLogAnalyzer {
 
     private static final String moduleName = AndroidModuleFactory.getModuleName();
+
     private static final Logger logger = Logger.getLogger(CallLogAnalyzer.class.getName());
+
+    /** the where clause (without the 'where') of the SQL select statement used to
+     * choose call log dbs; update the list of file names to include more files */
+    private static final String fileNameQuery = Stream.of("'logs.db'", "'contacts2.db'", "'contacts.db'")
+            .collect(Collectors.joining(" OR name = ", "name = ", ""));
+
+    /** the names of tables that potentially hold call logs in the dbs */
+    private static final Iterable<String> tableNames = Arrays.asList("calls", "logs");
+
     public static void findCallLogs() {
-        List<AbstractFile> absFiles;
         try {
             SleuthkitCase skCase = Case.getCurrentCase().getSleuthkitCase();
-            absFiles = skCase.findAllFilesWhere("name ='contacts2.db' OR name ='contacts.db'"); //get exact file names
-            if (absFiles.isEmpty()) {
-                return;
-            }
-            for (AbstractFile abstractFile : absFiles) {
-                try {
-                    File jFile = new java.io.File(Case.getCurrentCase().getTempDirectory(), abstractFile.getName());
-                    ContentUtils.writeToFile(abstractFile, jFile);
-                    findCallLogsInDB(jFile.toString(), abstractFile);
-                } catch (Exception e) {
-                    logger.log(Level.SEVERE, "Error parsing Call logs", e);
+            for (AbstractFile abstractFile : skCase.findAllFilesWhere(fileNameQuery)) {
+                try {
+                    File file = new File(Case.getCurrentCase().getTempDirectory(), abstractFile.getName());
+                    ContentUtils.writeToFile(abstractFile, file);
+                    findCallLogsInDB(file.toString(), abstractFile);
+                } catch (IOException e) {
+                    logger.log(Level.SEVERE, "Error writing temporary call log db to disk", e);
                 }
             }
         } catch (TskCoreException e) {
-            logger.log(Level.SEVERE, "Error finding Call logs", e);
+            logger.log(Level.SEVERE, "Error finding call logs", e);
         }
     }
 
     private static void findCallLogsInDB(String DatabasePath, AbstractFile f) {
-        Connection connection = null;
-        ResultSet resultSet = null;
-        Statement statement = null;
         if (DatabasePath == null || DatabasePath.isEmpty()) {
             return;
         }
-        try {
-            Class.forName("org.sqlite.JDBC"); //load JDBC driver
-            connection = DriverManager.getConnection("jdbc:sqlite:" + DatabasePath);
-            statement = connection.createStatement();
-        } catch (ClassNotFoundException | SQLException e) {
-            logger.log(Level.SEVERE, "Error opening database", e);
-            return;
-        }
+        try (Connection connection = DriverManager.getConnection("jdbc:sqlite:" + DatabasePath);
+                Statement statement = connection.createStatement();) {
 
-        try {
-            resultSet = statement.executeQuery(
-                    "SELECT number,date,duration,type, name FROM calls ORDER BY date DESC;");
-
-            BlackboardArtifact bba;
-
-            while (resultSet.next()) {
-                // name of person dialed or called. null if unregistered
-                String name = resultSet.getString("name");
-                String number = resultSet.getString("number");
-                //duration of call in seconds
-                Long duration = Long.valueOf(resultSet.getString("duration"));
-                Long date = Long.valueOf(resultSet.getString("date")) / 1000;
-
-                String direction = "";
-                switch (Integer.valueOf(resultSet.getString("type"))) {
-                    case 1:
-                        direction = "Incoming";
-                        break;
-                    case 2:
-                        direction = "Outgoing";
-                        break;
-                    case 3:
-                        direction = "Missed";
-                        break;
-                }
+            for (String tableName : tableNames) {
+                try (ResultSet resultSet = statement.executeQuery(
+                        "SELECT number,date,duration,type, name FROM " + tableName + " ORDER BY date DESC;");) {
+                    logger.log(Level.INFO, "Reading call log from table {0} in db {1}", new Object[]{tableName, DatabasePath});
+                    while (resultSet.next()) {
+                        Long date = resultSet.getLong("date") / 1000;
+                        final CallDirection direction = CallDirection.fromType(resultSet.getInt("type"));
+                        String directionString = direction != null ? direction.getDisplayName() : "";
+                        final String number = resultSet.getString("number");
+                        final long duration = resultSet.getLong("duration"); //duration of call is in seconds
+                        final String name = resultSet.getString("name"); // name of person dialed or called. null if unregistered
 
-                bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG); //create a call log and then add attributes from result set.
-                bba.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(), moduleName, number));
-                bba.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_START.getTypeID(), moduleName, date));
-                bba.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DATETIME_END.getTypeID(), moduleName, duration + date));
-                bba.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID(), moduleName, direction));
-                bba.addAttribute(new BlackboardAttribute(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), moduleName, name));
-            }
-        } catch (Exception e) {
-            logger.log(Level.SEVERE, "Error parsing Call logs to the Blackboard", e);
-        } finally {
-            try {
-                if (resultSet != null) {
-                    resultSet.close();
+                        try {
+                            BlackboardArtifact bba = f.newArtifact(BlackboardArtifact.ARTIFACT_TYPE.TSK_CALLLOG); //create a call log and then add attributes from result set.
+                            bba.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_PHONE_NUMBER.getTypeID(), moduleName, number));
+                            bba.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_START.getTypeID(), moduleName, date));
+                            bba.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DATETIME_END.getTypeID(), moduleName, duration + date));
+                            bba.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DIRECTION.getTypeID(), moduleName, directionString));
+                            bba.addAttribute(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_NAME.getTypeID(), moduleName, name));
+                        } catch (TskCoreException ex) {
+                            logger.log(Level.SEVERE, "Error posting call log record to the Blackboard", ex);
+                        }
+                    }
+                } catch (SQLException e) {
+                    logger.log(Level.WARNING, "Could not read table {0} in db {1}", new Object[]{tableName, DatabasePath});
                 }
-                statement.close();
-                connection.close();
-            } catch (Exception e) {
-                logger.log(Level.SEVERE, "Error closing the database", e);
             }
+        } catch (SQLException e) {
+            logger.log(Level.SEVERE, "Could not parse call log; error connecting to db " + DatabasePath, e);
         }
+    }
+
+    private static enum CallDirection {
+
+        INCOMING(1, "Incoming"), OUTGOING(2, "Outgoing"), MISSED(3, "Missed");
+        private final int type;
+
+        private final String displayName;
+
+        public String getDisplayName() {
+            return displayName;
+        }
+
+        private CallDirection(int type, String displayName) {
+            this.type = type;
+            this.displayName = displayName;
+        }
+
+        static CallDirection fromType(int t) {
+            switch (t) {
+                case 1:
+                    return INCOMING;
+                case 2:
+                    return OUTGOING;
+                case 3:
+                    return MISSED;
+                default:
+                    return null;
+            }
+        }
     }
 }
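Note on the CallLogAnalyzer rewrite: the hard-coded file-name query is replaced by `fileNameQuery`, built with `Collectors.joining(delimiter, prefix, suffix)`, and the analyzer now probes every table listed in `tableNames`. A minimal sketch (Python, matching the test scripts elsewhere in this patch) of the exact WHERE clause that stream pipeline produces, using the three file names from the patch:

```python
# Sketch of the WHERE clause built by
# Stream.of(...).collect(Collectors.joining(" OR name = ", "name = ", ""))
names = ["'logs.db'", "'contacts2.db'", "'contacts.db'"]
file_name_query = "name = " + " OR name = ".join(names)
assert file_name_query == "name = 'logs.db' OR name = 'contacts2.db' OR name = 'contacts.db'"
```

Extending the search to another database is then just one more entry in `Stream.of(...)`, which is what the comment on `fileNameQuery` asks future editors to do.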
diff --git a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java
index 582240e371f46fc95b79deae1fe696db05f6b411..f322e828c946441a0862c51bf4d9593fc7844f92 100644
--- a/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java
+++ b/RecentActivity/src/org/sleuthkit/autopsy/recentactivity/SearchEngineURLQueryAnalyzer.java
@@ -24,9 +24,7 @@
 import java.net.URLDecoder;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Set;
+import java.util.List;
 import java.util.logging.Level;
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
@@ -45,7 +43,7 @@
 import org.sleuthkit.datamodel.BlackboardAttribute;
 import org.sleuthkit.datamodel.BlackboardAttribute.ATTRIBUTE_TYPE;
 import org.sleuthkit.datamodel.Content;
-import org.sleuthkit.datamodel.TskException;
+import org.sleuthkit.datamodel.TskCoreException;
 import org.w3c.dom.Document;
 import org.w3c.dom.NamedNodeMap;
 import org.w3c.dom.NodeList;
@@ -65,13 +63,8 @@
 
     private static final Logger logger = Logger.getLogger(SearchEngineURLQueryAnalyzer.class.getName());
     private static final String XMLFILE = "SEUQAMappings.xml"; //NON-NLS
     private static final String XSDFILE = "SearchEngineSchema.xsd"; //NON-NLS
-    private static String[] searchEngineNames;
     private static SearchEngineURLQueryAnalyzer.SearchEngine[] engines;
-    private static Document xmlinput;
-    private static final SearchEngineURLQueryAnalyzer.SearchEngine NullEngine = new SearchEngineURLQueryAnalyzer.SearchEngine(
-            NbBundle.getMessage(SearchEngineURLQueryAnalyzer.class, "SearchEngineURLQueryAnalyzer.engineName.none"),
-            NbBundle.getMessage(SearchEngineURLQueryAnalyzer.class, "SearchEngineURLQueryAnalyzer.domainSubStr.none"),
-            new HashMap<String,String>());
+
     private Content dataSource;
     private IngestJobContext context;
 
@@ -79,52 +72,100 @@ class SearchEngineURLQueryAnalyzer extends Extract {
         moduleName = NbBundle.getMessage(ExtractIE.class, "SearchEngineURLQueryAnalyzer.moduleName.text");
     }
 
+    /**
+     * Stores a plain key and its regular-expression form as a pair.
+     * The key in the case of "?q=foo" would be "?q=".
+     */
+    private static class KeyPair {
+        private final String key;
+        private final String keyRegExp;
+
+        KeyPair (String key, String keyRegExp) {
+            this.key = key;
+            this.keyRegExp = keyRegExp;
+        }
+
+        String getKey() {
+            return key;
+        }
+
+        String getKeyRegExp() {
+            return keyRegExp;
+        }
+    }
+
     private static class SearchEngine {
-        private String _engineName;
-        private String _domainSubstring;
-        private Map<String, String> _splits;
-        private int _count;
+        private final String engineName;
+        private final String domainSubstring;
+        private final List<KeyPair> keyPairs;
+        private int count;
 
-        SearchEngine(String engineName, String domainSubstring, Map<String, String> splits) {
-            _engineName = engineName;
-            _domainSubstring = domainSubstring;
-            _splits = splits;
-            _count = 0;
+        SearchEngine(String engineName, String domainSubstring, List<KeyPair> keyPairs) {
+            this.engineName = engineName;
+            this.domainSubstring = domainSubstring;
+            this.keyPairs = keyPairs;
+            count = 0;
         }
 
         void increment() {
-            ++_count;
+            ++count;
         }
 
         String getEngineName() {
-            return _engineName;
+            return engineName;
         }
 
         String getDomainSubstring() {
-            return _domainSubstring;
+            return domainSubstring;
        }
 
         int getTotal() {
-            return _count;
+            return count;
         }
 
-        Set<Map.Entry<String, String>> getSplits() {
-            return this._splits.entrySet();
+        /**
+         * Get the key values used in the URL to denote the search term
+         * @return
+         */
+        List<KeyPair> getKeys() {
+            return this.keyPairs;
         }
 
         @Override
         public String toString() {
             String split = " ";
-            for (Map.Entry<String, String> kvp : getSplits()) {
-                split = split + "[ " + kvp.getKey() + " :: " + kvp.getValue() + " ]" + ", ";
+            for (KeyPair kp : keyPairs) {
+                split = split + "[ " + kp.getKey() + " :: " + kp.getKeyRegExp() + " ]" + ", ";
             }
             return NbBundle.getMessage(this.getClass(), "SearchEngineURLQueryAnalyzer.toString",
-                    _engineName, _domainSubstring, _count, split);
+                    engineName, domainSubstring, count, split);
         }
     }
 
-    private void createEngines() {
+    private void loadConfigFile() throws IngestModuleException {
+        Document xmlinput;
+        try {
+            String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFILE;
+            File f = new File(path);
+            logger.log(Level.INFO, "Load successful"); //NON-NLS
+            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
+            DocumentBuilder db = dbf.newDocumentBuilder();
+            xmlinput = db.parse(f);
+
+            if (!XMLUtil.xmlIsValid(xmlinput, SearchEngineURLQueryAnalyzer.class, XSDFILE)) {
+                logger.log(Level.WARNING, "Error loading Search Engines: could not validate against [" + XSDFILE + "], results may not be accurate."); //NON-NLS
+            }
+
+        } catch (IOException e) {
+            throw new IngestModuleException("Was not able to load SEUQAMappings.xml: " + e.getLocalizedMessage()); //NON-NLS
+        } catch (ParserConfigurationException pce) {
+            throw new IngestModuleException("Unable to build XML parser: " + pce.getLocalizedMessage()); //NON-NLS
+        } catch (SAXException sxe) {
+            throw new IngestModuleException("Unable to parse XML file: " + sxe.getLocalizedMessage()); //NON-NLS
+        }
+
         NodeList nlist = xmlinput.getElementsByTagName("SearchEngine"); //NON-NLS
         SearchEngineURLQueryAnalyzer.SearchEngine[] listEngines = new SearchEngineURLQueryAnalyzer.SearchEngine[nlist.getLength()];
         for (int i = 0; i < nlist.getLength(); i++) {
@@ -132,16 +173,17 @@ private void createEngines() {
             String EngineName = nnm.getNamedItem("engine").getNodeValue(); //NON-NLS
             String EnginedomainSubstring = nnm.getNamedItem("domainSubstring").getNodeValue(); //NON-NLS
-            Map<String, String> splits = new HashMap<>();
+            List<KeyPair> keys = new ArrayList<>();
+
             NodeList listSplits = xmlinput.getElementsByTagName("splitToken"); //NON-NLS
             for (int k = 0; k < listSplits.getLength(); k++) {
                 if (listSplits.item(k).getParentNode().getAttributes().getNamedItem("engine").getNodeValue().equals(EngineName)) { //NON-NLS
-                    splits.put(listSplits.item(k).getAttributes().getNamedItem("plainToken").getNodeValue(), listSplits.item(k).getAttributes().getNamedItem("regexToken").getNodeValue()); //NON-NLS
+                    keys.add( new KeyPair(listSplits.item(k).getAttributes().getNamedItem("plainToken").getNodeValue(), listSplits.item(k).getAttributes().getNamedItem("regexToken").getNodeValue())); //NON-NLS
                 }
             }
 
-            SearchEngineURLQueryAnalyzer.SearchEngine Se = new SearchEngineURLQueryAnalyzer.SearchEngine(EngineName, EnginedomainSubstring, splits);
+            SearchEngineURLQueryAnalyzer.SearchEngine Se = new SearchEngineURLQueryAnalyzer.SearchEngine(EngineName, EnginedomainSubstring, keys);
             //System.out.println("Search Engine: " + Se.toString());
             listEngines[i] = Se;
         }
@@ -153,28 +195,22 @@ private void createEngines() {
      * belongs to.
      *
      * @param domain domain as part of the URL
-     * @return supported search engine the domain belongs to, if any
+     * @return supported search engine the domain belongs to or null if no match is found
      *
      */
-    private static SearchEngineURLQueryAnalyzer.SearchEngine getSearchEngine(String domain) {
+    private static SearchEngineURLQueryAnalyzer.SearchEngine getSearchEngineFromUrl(String domain) {
         if (engines == null) {
-            return SearchEngineURLQueryAnalyzer.NullEngine;
+            return null;
         }
-        for (int i = 0; i < engines.length; i++) {
-            if (domain.contains(engines[i].getDomainSubstring())) {
-                return engines[i];
+        for (SearchEngine engine : engines) {
+            if (domain.contains(engine.getDomainSubstring())) {
+                return engine;
             }
         }
-        return SearchEngineURLQueryAnalyzer.NullEngine;
+        return null;
     }
 
-    private void getSearchEngineNames() {
-        String[] listNames = new String[engines.length];
-        for (int i = 0; i < listNames.length; i++) {
-            listNames[i] = engines[i]._engineName;
-        }
-        searchEngineNames = listNames;
-    }
+
 
     /**
      * Attempts to extract the query from a URL.
@@ -182,12 +218,12 @@
      * @param url The URL string to be dissected.
     * @return The extracted search query.
     */
-    private String extractSearchEngineQuery(String url) {
-        String x = "NoQuery"; //NON-NLS
-        SearchEngineURLQueryAnalyzer.SearchEngine eng = getSearchEngine(url);
-        for (Map.Entry<String, String> kvp : eng.getSplits()) {
-            if (url.contains(kvp.getKey())) {
-                x = split2(url, kvp.getValue());
+    private String extractSearchEngineQuery(SearchEngineURLQueryAnalyzer.SearchEngine eng, String url) {
+        String x = ""; //NON-NLS
+
+        for (KeyPair kp : eng.getKeys()) {
+            if (url.contains(kp.getKey())) {
+                x = getValue(url, kp.getKeyRegExp());
                 break;
             }
         }
@@ -204,38 +240,48 @@
      * Splits URLs based on a delimeter (key). .contains() and .split()
     *
     * @param url The URL to be split
-     * @param value the delimeter value used to split the URL into its search
+     * @param regExpKey the delimiter value used to split the URL into its search
     *              token, extracted from the xml.
     * @return The extracted search query
     *
     */
-    private String split2(String url, String value) {
-        String basereturn = "NoQuery"; //NON-NLS
-        String v = value;
+    private String getValue(String url, String regExpKey) {
+        /* NOTE: This doesn't seem like the most wonderful way to do this, but we have data
+         * that has a bunch of bogus URLs. Such as:
+         * - Multiple google "q=" terms, including one after a "#" tag. Google used the last one
+         * - Search/query part of the URL starting with a '#'.
+         * Attempts at more formal approaches of splitting on the "?" and then on "&" resulted in missing things.
+         */
+        String value = ""; //NON-NLS
+        String v = regExpKey;
         //Want to determine if string contains a string based on splitkey, but we want to split the string on splitKeyConverted due to regex
-        if (value.contains("\\?")) {
-            v = value.replace("\\?", "?");
+        if (regExpKey.contains("\\?")) {
+            v = regExpKey.replace("\\?", "?");
         }
         String[] sp = url.split(v);
         if (sp.length >= 2) {
             if (sp[sp.length - 1].contains("&")) {
-                basereturn = sp[sp.length - 1].split("&")[0];
+                value = sp[sp.length - 1].split("&")[0];
             } else {
-                basereturn = sp[sp.length - 1];
+                value = sp[sp.length - 1];
             }
         }
-        return basereturn;
+        return value;
     }
 
-    private void getURLs() {
+    private void findSearchQueries() {
         int totalQueries = 0;
         try {
             //from blackboard_artifacts
             Collection<BlackboardArtifact> listArtifacts = currentCase.getSleuthkitCase().getMatchingArtifacts("WHERE (`artifact_type_id` = '" + ARTIFACT_TYPE.TSK_WEB_BOOKMARK.getTypeID() //NON-NLS
                     + "' OR `artifact_type_id` = '" + ARTIFACT_TYPE.TSK_WEB_HISTORY.getTypeID() + "') "); //List of every 'web_history' and 'bookmark' artifact NON-NLS
             logger.log(Level.INFO, "Processing {0} blackboard artifacts.", listArtifacts.size()); //NON-NLS
-            getAll:
+
             for (BlackboardArtifact artifact : listArtifacts) {
+                if (context.isJobCancelled()) {
+                    break; //User cancelled the process.
+                }
+
                 //initializing default attributes
                 String query = "";
                 String searchEngineDomain = "";
@@ -254,25 +300,21 @@ private void getURLs() {
                 continue;
             }
 
-            SearchEngineURLQueryAnalyzer.SearchEngine se = NullEngine;
+            SearchEngineURLQueryAnalyzer.SearchEngine se = null;
             //from blackboard_attributes
             Collection<BlackboardAttribute> listAttributes = currentCase.getSleuthkitCase().getMatchingAttributes("Where `artifact_id` = " + artifact.getArtifactID()); //NON-NLS
-            getAttributes:
+
             for (BlackboardAttribute attribute : listAttributes) {
-                if (context.isJobCancelled()) {
-                    break getAll; //User cancled the process.
-                }
                 if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_URL.getTypeID()) {
                     final String urlString = attribute.getValueString();
-                    se = getSearchEngine(urlString);
-                    if (!se.equals(NullEngine)) {
-                        query = extractSearchEngineQuery(attribute.getValueString());
-                        if (query.equals("NoQuery") || query.equals("")) {   //False positive match, artifact was not a query. NON-NLS
-                            break getAttributes;
-                        }
-                    } else if (se.equals(NullEngine)) {
-                        break getAttributes; //could not determine type. Will move onto next artifact
-                    }
+                    se = getSearchEngineFromUrl(urlString);
+                    if (se == null)
+                        break;
+
+                    query = extractSearchEngineQuery(se, attribute.getValueString());
+                    if (query.equals("")) //False positive match, artifact was not a query. NON-NLS
+                        break;
+
                 } else if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_PROG_NAME.getTypeID()) {
                     browser = attribute.getValueString();
                 } else if (attribute.getAttributeTypeID() == BlackboardAttribute.ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID()) {
@@ -282,7 +324,7 @@ private void getURLs() {
                 }
             }
 
-            if (!se.equals(NullEngine) && !query.equals("NoQuery") && !query.equals("")) { //NON-NLS
+            if (se != null && !query.equals("")) { //NON-NLS
                 Collection<BlackboardAttribute> bbattributes = new ArrayList<>();
                 bbattributes.add(new BlackboardAttribute(ATTRIBUTE_TYPE.TSK_DOMAIN.getTypeID(),
                                                          NbBundle.getMessage(this.getClass(),
@@ -301,7 +343,7 @@ private void getURLs() {
                     ++totalQueries;
                 }
             }
-        } catch (TskException e) {
+        } catch (TskCoreException e) {
             logger.log(Level.SEVERE, "Encountered error retrieving artifacts for search engine queries", e); //NON-NLS
         } finally {
             if (context.isJobCancelled()) {
@@ -329,46 +371,24 @@ private String getTotals() {
 
     public void process(Content dataSource, IngestJobContext context) {
         this.dataSource = dataSource;
         this.context = context;
-        this.getURLs();
+        this.findSearchQueries();
 
         logger.log(Level.INFO, "Search Engine stats: \n{0}", getTotals()); //NON-NLS
     }
 
     @Override
     void init() throws IngestModuleException {
         try {
-            PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE, false);
-            init2();
+            PlatformUtil.extractResourceToUserConfigDir(SearchEngineURLQueryAnalyzer.class, XMLFILE, true);
         } catch (IOException e) {
             String message = NbBundle
                     .getMessage(this.getClass(), "SearchEngineURLQueryAnalyzer.init.exception.msg", XMLFILE);
             logger.log(Level.SEVERE, message, e);
            throw new IngestModuleException(message);
         }
+
+        loadConfigFile();
     }
 
-    private void init2() {
-        try {
-            String path = PlatformUtil.getUserConfigDirectory() + File.separator + XMLFILE;
-            File f = new File(path);
-            logger.log(Level.INFO, "Load successful"); //NON-NLS
-            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
-            DocumentBuilder db = dbf.newDocumentBuilder();
-            Document xml = db.parse(f);
-            xmlinput = xml;
-
-            if (!XMLUtil.xmlIsValid(xml, SearchEngineURLQueryAnalyzer.class, XSDFILE)) {
-                logger.log(Level.WARNING, "Error loading Search Engines: could not validate against [" + XSDFILE + "], results may not be accurate."); //NON-NLS
-            }
-            createEngines();
-            getSearchEngineNames();
-        } catch (IOException e) {
-            logger.log(Level.SEVERE, "Was not able to load SEUQAMappings.xml", e); //NON-NLS
-        } catch (ParserConfigurationException pce) {
-            logger.log(Level.SEVERE, "Unable to build XML parser", pce); //NON-NLS
-        } catch (SAXException sxe) {
-            logger.log(Level.SEVERE, "Unable to parse XML file", sxe); //NON-NLS
-        }
-    }
 
     @Override
     public void complete() {
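Note on the query extraction: `extractSearchEngineQuery()` now receives the already-matched engine and returns `""` (instead of the old `"NoQuery"` sentinel) when no key matches, and `getValue()` keeps the last split piece because real-world URLs can repeat the key. A loose Python sketch of that logic — the URL and key here are hypothetical, and `re.split` stands in for Java's `String.split` with the converted key:

```python
import re

def get_value(url, reg_exp_key):
    # Split on the regex form of the key, keep the LAST piece (bogus URLs
    # can repeat the key), then trim any trailing '&param=...' parameters.
    parts = re.split(reg_exp_key, url)
    if len(parts) < 2:
        return ""  # key absent: the caller treats this as a false positive
    return parts[-1].split("&")[0]

print(get_value("http://www.google.com/search?q=sleuthkit&hl=en", r"\?q="))  # -> sleuthkit
```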
diff --git a/build-windows.xml b/build-windows.xml
index 6b6a0904378b1ea2f2f193f0ec69b52893958f66..16f99952d33c441a2fd83c144238126a54d3853c 100644
--- a/build-windows.xml
+++ b/build-windows.xml
@@ -345,13 +345,12 @@
     <target name="add-ai-shortcuts" description="Configure installer to have desktop short cuts">
         <echo message="Adding desktop/menu shortcuts..."/>
         <exec executable="${ai-exe-path}">
-            <arg line="/edit ${aip-path} /NewShortcut -name ${app.title} -dir DesktopFolder -target APPDIR\bin\${aut-bin-name} -icon ${inst-path}\icon.ico"/>
+            <arg line='/edit ${aip-path} /NewShortcut -name "${app.title} ${app.version}" -dir DesktopFolder -target APPDIR\bin\${aut-bin-name} -icon ${inst-path}\icon.ico'/>
         </exec>
         <exec executable="${ai-exe-path}">
-            <arg line="/edit ${aip-path} /NewShortcut -name ${app.title} -dir SHORTCUTDIR -target APPDIR\bin\${aut-bin-name} -icon ${inst-path}\icon.ico"/>
+            <arg line='/edit ${aip-path} /NewShortcut -name "${app.title} ${app.version}" -dir SHORTCUTDIR -target APPDIR\bin\${aut-bin-name} -icon ${inst-path}\icon.ico'/>
         </exec>
     </target>
-
     <target name="ai-build" description="Build the installer based on properties set by 32/64 targets.">
         <antcall target="add-ai-files" inheritAll="true" />
diff --git a/docs/doxygen/Doxyfile b/docs/doxygen/Doxyfile
index 54d3c9cf03ec16794ac6e7db6417ae3cb2f882d1..00ffa373556533b70c97989a22aee3f6384afbcf 100644
--- a/docs/doxygen/Doxyfile
+++ b/docs/doxygen/Doxyfile
@@ -675,14 +675,10 @@ INPUT = main.dox \
                          native_libs.dox \
                          ../../Core/src \
                          ../../CoreLibs/src \
-                         ../../ExifParser/src \
-                         ../../HashDatabase/src \
                          ../../KeywordSearch/src \
                          ../../RecentActivity/src \
-                         ../../ScalpelCarver/src \
-                         ../../SevenZip/src \
+#../../ScalpelCarver/src \
                          ../../Testing/src \
-                         ../../Timeline/src \
                          ../../thunderbirdparser/src
 
 # This tag can be used to specify the character encoding of the source files
diff --git a/nbproject/project.properties b/nbproject/project.properties
index 4c0a0a88b453aa6ce962e2dcf4b8e5a77df1afef..55eff9f604bf9f88edff15c28f2699b181efdd95 100644
--- a/nbproject/project.properties
+++ b/nbproject/project.properties
@@ -4,12 +4,12 @@ app.title=Autopsy
 ### lowercase version of above
 app.name=${branding.token}
 ### if left unset, version will default to today's date
-app.version=3.1.0_Beta
+app.version=3.1.0
 ### Build type isn't used at this point, but it may be useful
 ### Must be one of: DEVELOPMENT, RELEASE
+#build.type=RELEASE
 build.type=DEVELOPMENT
 project.org.sleuthkit.autopsy.imageanalyzer=ImageAnalyzer
-#build.type=DEVELOPMENT
 update_versions=false
 #custom JVM options
 #Note: can be higher on 64 bit systems, should be in sync with build.xml
diff --git a/test/script/regression.py b/test/script/regression.py
index 34242663e9bea2377b788acfad2a4bf3d8b4c796..3b331a36b33d5fc1cab698f8febdea8e2e591024 100755
--- a/test/script/regression.py
+++ b/test/script/regression.py
@@ -16,6 +16,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+
 from tskdbdiff import TskDbDiff, TskDbDiffException
 import codecs
 import datetime
@@ -37,7 +38,6 @@
 import re
 import zipfile
 import zlib
-import srcupdater
 from regression_utils import *
 import shutil
 import ntpath
@@ -86,6 +86,18 @@
 
 Day = 0
 
+def usage():
+    print ("-f PATH single file")
+    print ("-r rebuild")
+    print ("-l PATH path to config file")
+    print ("-u Ignore unallocated space")
+    print ("-k Do not delete SOLR index")
+    print ("-v verbose mode")
+    print ("-e ARG Enable exception mode with given string")
+    print ("-h help")
+    print ("-fr Do not download new images each time")
+
+
 #----------------------#
 #         Main         #
 #----------------------#
@@ -93,10 +105,12 @@ def main():
     """Parse the command-line arguments, create the configuration, and run the tests."""
     args = Args()
     parse_result = args.parse()
-    test_config = TestConfiguration(args)
     # The arguments were given wrong:
     if not parse_result:
         return
+    test_config = TestConfiguration(args)
+
+    # Download images unless they asked not to
     if(not args.fr):
         antin = ["ant"]
         antin.append("-f")
@@ -144,9 +158,10 @@ def run_tests(test_config):
         time.sleep(10)
 
     Reports.write_html_foot(test_config.html_log)
-
-    if test_config.jenkins:
-        setupAttachments(Errors.errors_out, test_config)
+
+    # This code was causing errors with paths, so it's disabled
+    #if test_config.jenkins:
+    #    copyErrorFiles(Errors.errors_out, test_config)
 
     if all([ test_data.overall_passed for test_data in test_data_list ]):
         pass
@@ -858,8 +873,8 @@ def _compare_report_files(gold_path, output_path):
     unordered list in the html report files, or (0, 0) if the lenghts
     are the same.
     """
-    gold_file = open(gold_path)
-    output_file = open(output_path)
+    gold_file = open(gold_path, encoding='utf-8')
+    output_file = open(output_path, encoding='utf-8')
     goldHtml = gold_file.read()
     outputHtml = output_file.read()
     goldHtml = goldHtml[goldHtml.find("<ul>"):]
@@ -1675,7 +1690,7 @@ def parse(self):
             except:
                 print("Error: No exception string given.")
         elif arg == "-h" or arg == "--help":
-            print(usage())
+            usage()
             return False
         elif arg == "-fr" or arg == "--forcerun":
             print("Not downloading new images")
@@ -1888,7 +1903,7 @@ def find_file_in_dir(dir, name, ext):
     except:
         raise DirNotFoundException(dir)
 
-def setupAttachments(attachments, test_config):
+def copyErrorFiles(attachments, test_config):
     """Move email attachments to the location specified in the config file.
 
     Used for Jenkins build.
@@ -1917,6 +1932,11 @@ class OS:
     LINUX, MAC, WIN, CYGWIN = range(4)
 if __name__ == "__main__":
+
+    if sys.hexversion < 0x03000000:
+        print("Python 3 required")
+        sys.exit(1)
+
     global SYS
     if _platform == "linux" or _platform == "linux2":
         SYS = OS.LINUX
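Note on the interpreter guard: both test scripts now exit early on Python 2 by comparing `sys.hexversion` against `0x03000000`. `hexversion` packs major/minor/micro (plus release level and serial) into one integer, so a single comparison checks the whole version; a small illustrative sketch:

```python
import sys

# hexversion layout: 0xAABBCCDD -> major=AA, minor=BB, micro=CC,
# low byte = release level and serial.
major = (sys.hexversion >> 24) & 0xFF
minor = (sys.hexversion >> 16) & 0xFF
micro = (sys.hexversion >> 8) & 0xFF
print(major, minor, micro)  # e.g. 3 4 1 on Python 3.4.1
assert (sys.hexversion >= 0x03000000) == (major >= 3)
```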
diff --git a/test/script/srcupdater.py b/test/script/srcupdater.py
deleted file mode 100755
index f270500944a7b095e008b3edb3db14165964431b..0000000000000000000000000000000000000000
--- a/test/script/srcupdater.py
+++ /dev/null
@@ -1,224 +0,0 @@
-import codecs
-import datetime
-import logging
-import os
-import re
-import shutil
-import socket
-import sqlite3
-import subprocess
-import sys
-from sys import platform as _platform
-import time
-import traceback
-import xml
-from xml.dom.minidom import parse, parseString
-import Emailer
-from regression_utils import *
-
-def compile(errore, attachli, parsedin, branch):
-    global to
-    global server
-    global subj
-    global email_enabled
-    global redo
-    global tryredo
-    global failedbool
-    global errorem
-    errorem = errore
-    global attachl
-    attachl = attachli
-    global passed
-    global parsed
-    parsed = parsedin
-    passed = True
-    tryredo = False
-    redo = True
-    while(redo):
-        passed = True
-        if(passed):
-            gitPull("sleuthkit", branch)
-        if(passed):
-            vsBuild()
-            print("TSK")
-        if(passed):
-            gitPull("autopsy", branch)
-        if(passed):
-            antBuild("datamodel", False, branch)
-            print("DataModel")
-        if(passed):
-            antBuild("autopsy", True, branch)
-            print("Aut")
-        if(passed):
-            redo = False
-        else:
-            print("Compile Failed")
-            time.sleep(3600)
-    attachl = []
-    errorem = "The test standard didn't match the gold standard.\n"
-    failedbool = False
-    if(tryredo):
-        errorem = ""
-        errorem += "Rebuilt properly.\n"
-        if email_enabled:
-            Emailer.send_email(to, server, subj, errorem, attachl)
-        attachl = []
-        passed = True
-
-#Pulls from git
-def gitPull(TskOrAutopsy, branch):
-    global SYS
-    global errorem
-    global attachl
-    ccwd = ""
-    gppth = make_local_path("..", "GitPullOutput" + TskOrAutopsy + ".txt")
-    attachl.append(gppth)
-    gpout = open(gppth, 'a')
-    if TskOrAutopsy == "sleuthkit":
-        ccwd = os.path.join("..", "..", "..", "sleuthkit")
-    else:
-        ccwd = os.path.join("..", "..")
-    print("Resetting " + TskOrAutopsy)
-    call = ["git", "reset", "--hard"]
-    subprocess.call(call, stdout=sys.stdout, cwd=ccwd)
-    print("Checking out " + branch)
-    call = ["git", "checkout", branch]
-    subprocess.call(call, stdout=sys.stdout, cwd=ccwd)
-    toPull = "https://www.github.com/sleuthkit/" + TskOrAutopsy
-    call = ["git", "pull", toPull, branch]
-    if TskOrAutopsy == "sleuthkit":
-        ccwd = os.path.join("..", "..", "..", "sleuthkit")
-    else:
-        ccwd = os.path.join("..", "..")
-    subprocess.call(call, stdout=sys.stdout, cwd=ccwd)
-    gpout.close()
-
-#Builds TSK as a win32 applicatiion
-def vsBuild():
-    global redo
-    global tryredo
-    global passed
-    global parsed
-    #Please ensure that the current working directory is $autopsy/testing/script
-    oldpath = os.getcwd()
-    os.chdir(os.path.join("..", "..", "..","sleuthkit", "win32"))
-    vs = []
-    vs.append("/cygdrive/c/windows/microsoft.NET/framework/v4.0.30319/MSBuild.exe")
-    vs.append(os.path.join("Tsk-win.sln"))
-    vs.append("/p:configuration=release")
-    vs.append("/p:platform=x64")
-    vs.append("/t:clean")
-    vs.append("/t:rebuild")
-    print(vs)
-    VSpth = make_local_path("..", "VSOutput.txt")
-    VSout = open(VSpth, 'a')
-    subprocess.call(vs, stdout=VSout)
-    VSout.close()
-    os.chdir(oldpath)
-    chk = os.path.join("..", "..", "..","sleuthkit", "win32", "x64", "Release", "libtsk_jni.dll")
-    if not os.path.exists(chk):
-        print("path doesn't exist")
-        global errorem
-        global attachl
-        global email_enabled
-        if(not tryredo):
-            errorem += "LIBTSK C++ failed to build.\n"
-            attachl.append(VSpth)
-            if email_enabled:
-                Emailer.send_email(parsed, errorem, attachl, False)
-        tryredo = True
-        passed = False
-        redo = True
-
-#Builds Autopsy or the Datamodel
-def antBuild(which, Build, branch):
-    print("building: ", which)
-    global redo
-    global passed
-    global tryredo
-    global parsed
-    directory = os.path.join("..", "..")
-    ant = []
-    if which == "datamodel":
-        directory = os.path.join("..", "..", "..", "sleuthkit", "bindings", "java")
-    ant.append("ant")
-    ant.append("-f")
-    ant.append(directory)
-    ant.append("clean")
-    if(Build):
-        ant.append("build")
-    else:
-        ant.append("dist")
-    antpth = make_local_path("..", "ant" + which + "Output.txt")
-    antout = open(antpth, 'a')
-    succd = subprocess.call(ant, stdout=antout)
-    antout.close()
-    global errorem
-    global attachl
-    global email_enabled
-    global to
-    global subj
-    global server
-    if which == "datamodel":
-        chk = os.path.join("..", "..", "..","sleuthkit", "bindings", "java", "dist", "TSK_DataModel.jar")
-        try:
-            open(chk)
-        except IOError as e:
-            if(not tryredo):
-                errorem += "DataModel Java build failed on branch " + branch + ".\n"
-                attachl.append(antpth)
-                if email_enabled:
-                    Emailer.send_email(to, server, subj, errorem, attachl)
-            passed = False
-            tryredo = True
-    elif (succd != 0 and (not tryredo)):
-        errorem += "Autopsy build failed on branch " + branch + ".\n"
-        attachl.append(antpth)
-        Emailer.send_email(to, server, subj, errorem, attachl)
-        tryredo = True
-    elif (succd != 0):
-        passed = False
-
-
-def main():
-    global email_enabled
-    global to
-    global server
-    global subj
-    errore = ""
-    attachli = []
-    config_file = ""
-    arg = sys.argv.pop(0)
-    arg = sys.argv.pop(0)
-    config_file = arg
-    arg = sys.argv.pop(0)
-    branch = arg
-    parsedin = parse(config_file)
-    try:
-        to = parsedin.getElementsByTagName("email")[0].getAttribute("value").encode().decode("utf_8")
-        server = parsedin.getElementsByTagName("mail_server")[0].getAttribute("value").encode().decode("utf_8")
-        subj = parsedin.getElementsByTagName("subject")[0].getAttribute("value").encode().decode("utf_8")
-    except Exception:
-        email_enabled = False
-    # email_enabled = (to is not None) and (server is not None) and (subj is not None)
-    email_enabled = False
-    compile(errore, attachli, parsedin, branch)
-
-class OS:
-    LINUX, MAC, WIN, CYGWIN = range(4)
-if __name__ == "__main__":
-    global SYS
-    if _platform == "linux" or _platform == "linux2":
-        SYS = OS.LINUX
-    elif _platform == "darwin":
-        SYS = OS.MAC
-    elif _platform == "win32":
-        SYS = OS.WIN
-    elif _platform == "cygwin":
-        SYS = OS.CYGWIN
-
-    if SYS is OS.WIN or SYS is OS.CYGWIN:
-        main()
-    else:
-        print("We only support Windows and Cygwin at this time.")
-
diff --git a/test/script/tskdbdiff.py b/test/script/tskdbdiff.py
index a403b39838fc7806580b375f3e6851ccda89fc0c..e5a2ec2dc0d66d7b7c1e4276e46e0fc80cb7ccef 100755
--- a/test/script/tskdbdiff.py
+++ b/test/script/tskdbdiff.py
@@ -1,3 +1,5 @@
+# Requires python3
+
 import re
 import sqlite3
 import subprocess
@@ -8,9 +10,7 @@ import sys
 
 class TskDbDiff(object):
-    """Represents the differences between the gold and output databases.
-
-    Contains methods to compare two databases.
+    """Compares two TSK/Autopsy SQLite databases.
 
     Attributes:
         gold_artifacts:
@@ -31,13 +31,14 @@ def __init__(self, output_db, gold_db, output_dir=None, gold_bb_dump=None, gold_
         """Constructor for TskDbDiff.
 
         Args:
-            output_db_path: a pathto_File, the output database
-            gold_db_path: a pathto_File, the gold database
-            output_dir: (optional) a pathto_Dir, the location where the generated files will be put.
-            gold_bb_dump: (optional) a pathto_File, the location where the gold blackboard dump is located
-            gold_dump: (optional) a pathto_File, the location where the gold non-blackboard dump is located
-            verbose: (optional) a boolean, should the diff results be printed to stdout?
+            output_db_path: path to output database (non-gold standard)
+            gold_db_path: path to gold database
+            output_dir: (optional) Path to folder where generated files will be put.
+            gold_bb_dump: (optional) path to file where the gold blackboard dump is located
+            gold_dump: (optional) path to file where the gold non-blackboard dump is located
+            verbose: (optional) a boolean, if true, diff results are sent to stdout.
         """
+
         self.output_db_file = output_db
         self.gold_db_file = gold_db
         self.output_dir = output_dir
@@ -57,22 +58,29 @@ def run_diff(self):
         Raises:
             TskDbDiffException: if an error occurs while diffing or dumping the database
         """
+        self._init_diff()
+
         # generate the gold database dumps if necessary
         if self._generate_gold_dump:
             TskDbDiff._dump_output_db_nonbb(self.gold_db_file, self.gold_dump)
         if self._generate_gold_bb_dump:
             TskDbDiff._dump_output_db_bb(self.gold_db_file, self.gold_bb_dump)
 
-        # generate the output database dumps
-        TskDbDiff.dump_output_db(self.output_db_file, self._dump, self._bb_dump)
+        # generate the output database dumps (both DB and BB)
+        TskDbDiff._dump_output_db_nonbb(self.output_db_file, self._dump)
+        TskDbDiff._dump_output_db_bb(self.output_db_file, self._bb_dump)
 
+        # Compare non-BB
         dump_diff_pass = self._diff(self._dump, self.gold_dump, self._dump_diff)
+
+        # Compare BB
         bb_dump_diff_pass = self._diff(self._bb_dump, self.gold_bb_dump, self._bb_dump_diff)
 
         self._cleanup_diff()
         return dump_diff_pass, bb_dump_diff_pass
 
+
     def _init_diff(self):
         """Set up the necessary files based on the arguments given at construction"""
         if self.output_dir is None:
@@ -91,6 +99,7 @@ def _init_diff(self):
             self.gold_bb_dump = TskDbDiff._get_tmp_file("GoldSortedData", ".txt")
             self.gold_dump = TskDbDiff._get_tmp_file("GoldDBDump", ".txt")
 
+
     def _cleanup_diff(self):
         if self.output_dir is None:
             #cleanup temp files
@@ -100,30 +109,41 @@ def _cleanup_diff(self):
             os.remove(self._dump_diff)
         if os.path.isfile(self._bb_dump_diff):
             os.remove(self._bb_dump_diff)
+
         if self.gold_bb_dump is None:
             os.remove(self.gold_bb_dump)
             os.remove(self.gold_dump)
 
+
     def _diff(self, output_file, gold_file, diff_path):
         """Compare two text files.
 
         Args:
-            output_file: a pathto_File, the output text file
-            gold_file: a pathto_File, the input text file
+            output_file: a pathto_File, the latest text file
+            gold_file: a pathto_File, the gold text file
+            diff_path: The file to write the differences to
+
+        Returns False if different
         """
-        if(not os.path.isfile(output_file)):
+
+        if (not os.path.isfile(output_file)):
             return False
-        output_data = codecs.open(output_file, "r", "utf_8").read()
-        gold_data = codecs.open(gold_file, "r", "utf_8").read()
-        if (not(gold_data == output_data)):
-            diff_file = codecs.open(diff_path, "wb", "utf_8")
-            dffcmdlst = ["diff", gold_file, output_file]
-            subprocess.call(dffcmdlst, stdout = diff_file)
+        if (not os.path.isfile(gold_file)):
             return False
-        else:
+
+        # It is faster to read the contents in and directly compare
+        output_data = codecs.open(output_file, "r", "utf_8").read()
+        gold_data = codecs.open(gold_file, "r", "utf_8").read()
+        if (gold_data == output_data):
             return True
 
+        # If they are different, invoke 'diff'
+        diff_file = codecs.open(diff_path, "wb", "utf_8")
+        dffcmdlst = ["diff", gold_file, output_file]
+        subprocess.call(dffcmdlst, stdout = diff_file)
+        return False
+
+
     def _dump_output_db_bb(db_file, bb_dump_file):
         """Dumps sorted text results to the given output location.
 
@@ -134,12 +154,15 @@ def _dump_output_db_bb(db_file, bb_dump_file):
             db_file: a pathto_File, the output database.
             bb_dump_file: a pathto_File, the sorted dump file to write to
         """
+
         unsorted_dump = TskDbDiff._get_tmp_file("dump_data", ".txt")
         conn = sqlite3.connect(db_file)
         conn.text_factory = lambda x: x.decode("utf-8", "ignore")
         conn.row_factory = sqlite3.Row
+
         artifact_cursor = conn.cursor()
-        # Get the list of all artifacts
+
+        # Get the list of all artifacts (along with type and associated file)
         # @@@ Could add a SORT by parent_path in here since that is how we are going to later sort it.
         artifact_cursor.execute("SELECT tsk_files.parent_path, tsk_files.name, blackboard_artifact_types.display_name, blackboard_artifacts.artifact_id FROM blackboard_artifact_types INNER JOIN blackboard_artifacts ON blackboard_artifact_types.artifact_type_id = blackboard_artifacts.artifact_type_id INNER JOIN tsk_files ON tsk_files.obj_id = blackboard_artifacts.obj_id")
         database_log = codecs.open(unsorted_dump, "wb", "utf_8")
@@ -148,9 +171,11 @@ def _dump_output_db_bb(db_file, bb_dump_file):
         counter = 0
         artifact_count = 0
         artifact_fail = 0
+        # Cycle through artifacts
         try:
             while (row != None):
+                # File Name and artifact type
                 if(row["parent_path"] != None):
                     database_log.write(row["parent_path"] + row["name"] + ' <artifact type="' + row["display_name"] + '" > ')
@@ -166,22 +191,14 @@ def _dump_output_db_bb(db_file, bb_dump_file):
                 art_id = str(row["artifact_id"])
                 attribute_cursor.execute("SELECT blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double FROM blackboard_attributes INNER JOIN blackboard_attribute_types ON blackboard_attributes.attribute_type_id = blackboard_attribute_types.attribute_type_id WHERE artifact_id =? ORDER BY blackboard_attributes.source, blackboard_attribute_types.display_name, blackboard_attributes.value_type, blackboard_attributes.value_text, blackboard_attributes.value_int32, blackboard_attributes.value_int64, blackboard_attributes.value_double", [art_id])
                 attributes = attribute_cursor.fetchall()
-            except sqlite3.Error as e:
-                msg = "Attributes in artifact id (in output DB)# " + str(row["artifact_id"]) + " encountered an error: " + str(e) +" .\n"
-                print("Attributes in artifact id (in output DB)# ", str(row["artifact_id"]), " encountered an error: ", str(e))
-                print()
-                looptry = False
-                artifact_fail += 1
-                database_log.write('Error Extracting Attributes')
-                database_log.close()
-                raise TskDbDiffException(msg)
 
-            # Print attributes
-            if(looptry == True):
+                # Print attributes
                 if (len(attributes) == 0):
+                    # @@@@ This should be </artifact>
                     database_log.write(' <artifact/>\n')
                     row = artifact_cursor.fetchone()
                     continue
+
                 src = attributes[0][0]
                 for attr in attributes:
                     attr_value_index = 3 + attr["value_type"]
@@ -191,8 +208,10 @@ def _dump_output_db_bb(db_file, bb_dump_file):
                         numvals += 1
                     if(numvals > 1):
                         msg = "There were too many values for attribute type: " + attr["display_name"] + " for artifact with id #" + str(row["artifact_id"]) + ".\n"
+
                     if(not attr["source"] == src):
                         msg = "There were inconsistent sources for artifact with id #" + str(row["artifact_id"]) + ".\n"
+
                     try:
                         attr_value_as_string = str(attr[attr_value_index])
                         #if((type(attr_value_as_string) != 'unicode') or (type(attr_value_as_string) != 'str')):
@@ -203,7 +222,21 @@ def _dump_output_db_bb(db_file, bb_dump_file):
                     except IOError as e:
                         print("IO error")
                         raise TskDbDiffException("Unexpected IO error while writing to database log." + str(e))
-
+
+            except sqlite3.Error as e:
+                msg = "Attributes in artifact id (in output DB)# " + str(row["artifact_id"]) + " encountered an error: " + str(e) +" .\n"
+                print("Attributes in artifact id (in output DB)# ", str(row["artifact_id"]), " encountered an error: ", str(e))
+                print()
+                looptry = False
+                artifact_fail += 1
+                database_log.write('Error Extracting Attributes')
+                database_log.close()
+                raise TskDbDiffException(msg)
+            finally:
+                attribute_cursor.close()
+
+
+            # @@@@ This should be </artifact>
             database_log.write(' <artifact/>\n')
             row = artifact_cursor.fetchone()
@@ -213,7 +246,6 @@ def _dump_output_db_bb(db_file, bb_dump_file):
             raise TskDbDiffException("Unexpected error while dumping blackboard database: " + str(e))
         finally:
             database_log.close()
-            attribute_cursor.close()
             artifact_cursor.close()
             conn.close()
 
@@ -221,6 +253,7 @@ def _dump_output_db_bb(db_file, bb_dump_file):
         srtcmdlst = ["sort", unsorted_dump, "-o", bb_dump_file]
         subprocess.call(srtcmdlst)
 
+
     def _dump_output_db_nonbb(db_file, dump_file):
         """Dumps a database to a text file.
 
@@ -230,11 +263,18 @@ def _dump_output_db_nonbb(db_file, dump_file):
             db_file: a pathto_File, the database file to dump
             dump_file: a pathto_File, the location to dump the non-blackboard database items
         """
+
+        # Make a copy that we can modify
         backup_db_file = TskDbDiff._get_tmp_file("tsk_backup_db", ".db")
         shutil.copy(db_file, backup_db_file)
+        #print (backup_db_file)
+        # We sometimes get situations with messed up permissions
+        os.chmod (backup_db_file, 0o777)
+
         conn = sqlite3.connect(backup_db_file)
         id_path_table = build_id_table(conn.cursor())
         conn.text_factory = lambda x: x.decode("utf-8", "ignore")
+
         # Delete the blackboard tables
         conn.execute("DROP TABLE blackboard_artifacts")
         conn.execute("DROP TABLE blackboard_attributes")
@@ -253,6 +293,7 @@ def _dump_output_db_nonbb(db_file, dump_file):
         # cleanup the backup
         os.remove(backup_db_file)
 
+
     def dump_output_db(db_file, dump_file, bb_dump_file):
         """Dumps the given database to text files for later comparison.
 
@@ -264,6 +305,7 @@ def dump_output_db(db_file, dump_file, bb_dump_file):
         TskDbDiff._dump_output_db_nonbb(db_file, dump_file)
         TskDbDiff._dump_output_db_bb(db_file, bb_dump_file)
 
+
     def _get_tmp_file(base, ext):
         time = datetime.datetime.now().time().strftime("%H%M%f")
         return os.path.join(os.environ['TMP'], base + time + ext)
@@ -356,5 +398,9 @@ def main():
 
 if __name__ == "__main__":
+    if sys.hexversion < 0x03000000:
+        print("Python 3 required")
+        sys.exit(1)
+
     sys.exit(main())
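Note on using the reworked differ: `run_diff()` now dumps both databases itself and returns a pass/fail pair (non-blackboard first, then blackboard). A hedged usage sketch — the paths are hypothetical, only the two database arguments are required, and `output_dir` is the optional destination for the generated dump and diff files:

```python
from tskdbdiff import TskDbDiff

# Compare a freshly generated case database against the gold standard.
diff = TskDbDiff("output/autopsy.db", "gold/autopsy.db", output_dir="diff_out")
db_ok, bb_ok = diff.run_diff()  # (non-blackboard pass, blackboard pass)
print("DB match:", db_ok, "Blackboard match:", bb_ok)
```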