From a0ab975f77c878554d6121194db570841616246b Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Wed, 26 Oct 2022 09:04:12 -0400 Subject: [PATCH 01/20] Update snapchat.py --- scripts/artifacts/snapchat.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/scripts/artifacts/snapchat.py b/scripts/artifacts/snapchat.py index a9f161e3..c9e4dd50 100755 --- a/scripts/artifacts/snapchat.py +++ b/scripts/artifacts/snapchat.py @@ -1,5 +1,6 @@ import bcrypt import xml.etree.ElementTree as ET +import datetime from scripts.artifact_report import ArtifactHtmlReport from scripts.artifacts.mewe import APP_NAME @@ -316,6 +317,7 @@ def _parse_xml(xml_file, xml_file_name, report_folder, title, report_name): tree = ET.parse(xml_file) data_headers = ('Key', 'Value') data_list = [] + unix_stamps = ['INSTALL_ON_DEVICE_TIMESTAMP','LONG_CLIENT_ID_DEVICE_TIMESTAMP','FIRST_LOGGED_IN_ON_DEVICE_TIMESTAMP'] root = tree.getroot() for node in root: @@ -325,6 +327,11 @@ def _parse_xml(xml_file, xml_file_name, report_folder, title, report_name): except: value = node.text + if node.attrib['name'] in unix_stamps: + value = datetime.datetime.fromtimestamp(int(value)/1000).strftime('%Y-%m-%d %H:%M:%S.%f') + else: + pass + data_list.append((node.attrib['name'], value)) _make_reports(f'{APP_NAME} - {report_name}', data_headers, data_list, report_folder, xml_file_name) From b363c9515fd7f31677a2ec81cf63862b075ed6f0 Mon Sep 17 00:00:00 2001 From: "theAtropos4n6 (Evangelos D.)" <70748441+theAtropos4n6@users.noreply.github.com> Date: Thu, 3 Nov 2022 19:39:38 +0200 Subject: [PATCH 02/20] Update FacebookMessenger.py I noticed that from at least version 379.1.0.23.114 onwards the app changed the name of the db from threads_db2 to ssus.USER-ID.threads_db2. Hence the update. The parser seems to still work just fine. However further research may be needed though to verify if anything else changed. 
--- scripts/artifacts/FacebookMessenger.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/artifacts/FacebookMessenger.py b/scripts/artifacts/FacebookMessenger.py index 609af254..816987c2 100755 --- a/scripts/artifacts/FacebookMessenger.py +++ b/scripts/artifacts/FacebookMessenger.py @@ -30,7 +30,7 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): else: typeof ='' - if file_found.endswith('threads_db2-uid'): + if file_found.endswith('threads_db2-uid') or (file_found.startswith('ssus.') and file_found.endswith('threads_db2')): source_file = file_found.replace(seeker.directory, '') userid = '' data_list = [] @@ -209,6 +209,6 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): __artifacts__ = { "FacebookMessenger": ( "Facebook Messenger", - ('*/threads_db2*'), + ('*/*threads_db2*'), get_FacebookMessenger) -} \ No newline at end of file +} From 9da777bf219693f0d39e641f2923cd7135931b61 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Sun, 6 Nov 2022 20:04:13 -0500 Subject: [PATCH 03/20] Bumble Parser --- scripts/artifacts/bumble.py | 642 ++++++++++++++++++++++++++++++++++++ scripts/report.py | 4 + 2 files changed, 646 insertions(+) create mode 100644 scripts/artifacts/bumble.py diff --git a/scripts/artifacts/bumble.py b/scripts/artifacts/bumble.py new file mode 100644 index 00000000..d6c5a660 --- /dev/null +++ b/scripts/artifacts/bumble.py @@ -0,0 +1,642 @@ +import sqlite3 +import os +import shutil +import textwrap +import blackboxprotobuf + +from packaging import version +from scripts.artifact_report import ArtifactHtmlReport +from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly + +def get_bumble(files_found, report_folder, seeker, wrap_text): + + source_file_settings = '' + source_file_chat_db = '' + settings_file = '' + chat_db = '' + user_name = '' + user_email = '' + user_phone = '' + user_id = 
'' + user_age = '' + user_birthdate = '' + city = '' + country = '' + user_occupation = '' + user_education = '' + user_aboutme = '' + data_list = [] + + for file_found in files_found: + + file_found = str(file_found) + file_name = os.path.basename(file_found) + if file_name == 'ChatComDatabase': # skip -journal and other files + chat_db = file_found + source_file_chat_db = file_found.replace(seeker.directory, '') + continue + + elif file_name.endswith('='): + settings_file = file_found + source_file_settings = file_found.replace(seeker.directory, '') + continue + + if settings_file != '': + with open(settings_file, 'rb') as f: + + pb = f.read() + message = blackboxprotobuf.decode_message(pb) + types = {'0': {'name': '', 'type': 'int'}, + '1': {'name': '', 'type': 'int'}, + '2': {'message_typedef': {'1': {'message_typedef': {'1': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}, + '2': {'alt_typedefs': {'1': {'1': {'name': '', + 'type': 'int'}, + '2': {'message_typedef': {'1': {'name': 'user_id', + 'type': 'str'}, + '10': {'message_typedef': {}, + 'name': '', + 'type': 'message'}, + '100': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '2': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '10': {'name': '', + 'type': 'int'}, + '12': {'name': '', + 'type': 'bytes'}, + '13': {'name': '', + 'type': 'bytes'}, + '16': {'message_typedef': {'3': {'name': '', + 'type': 'bytes'}, + '4': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '2': {'name': '', + 'type': 'bytes'}, + '20': {'name': '', + 'type': 'int'}, + '21': {'message_typedef': {'12': {'name': '', + 'type': 'int'}, + '13': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'bytes'}, + '24': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '2': {'name': '', + 'type': 'int'}, + '5': {'name': '', + 'type': 
'int'}}, + 'name': '', + 'type': 'message'}, + '31': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '5': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '27': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'name': 'phone_number', + 'type': 'str'}, + '5': {'name': '', + 'type': 'int'}, + '6': {'name': '', + 'type': 'int'}, + '7': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '2': {'message_typedef': {'9': {'name': '', + 'type': 'fixed64'}}, + 'name': '', + 'type': 'message'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'name': '', + 'type': 'int'}, + '5': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'bytes'}, + '3': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '6': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '8': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '4': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '11': {'name': 'user_email', + 'type': 'str'}, + '1110': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '1160': {'message_typedef': {}, + 'name': '', + 'type': 'message'}, + '1161': {'message_typedef': {}, + 'name': '', + 'type': 'message'}, + '1162': {'message_typedef': {}, + 'name': '', + 'type': 'message'}, + '1163': {'name': '', + 'type': 'bytes'}, + '1410': {'name': '', + 'type': 'int'}, + '1424': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '1442': {'name': '', + 'type': 'int'}, + '200': {'name': 'user_name', + 'type': 'str'}, + '210': {'name': 'user_age', + 'type': 'int'}, + '220': {'name': 'user_birthdate', + 'type': 'str'}, + '230': {'name': '', + 'type': 'int'}, + '291': 
{'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'int'}, + '310': {'name': '', + 'type': 'int'}, + '340': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '2': {'name': '', + 'type': 'bytes'}, + '26': {'name': '', + 'type': 'int'}, + '27': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '5': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '6': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}, + '341': {'name': '', + 'type': 'bytes'}, + '370': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '10': {'name': '', + 'type': 'int'}, + '11': {'name': '', + 'type': 'int'}, + '13': {'name': '', + 'type': 'int'}, + '14': {'name': '', + 'type': 'bytes'}, + '15': {'name': '', + 'type': 'int'}, + '18': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '17': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'bytes'}, + '26': {'name': '', + 'type': 'int'}, + '27': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '5': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '6': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '8': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '19': {'name': '', + 'type': 'int'}, + '20': {'name': '', + 'type': 'int'}, + '21': {'name': '', + 'type': 'bytes'}, + '22': {'message_typedef': 
{'12': {'name': '', + 'type': 'int'}, + '13': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'bytes'}, + '24': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '2': {'name': '', + 'type': 'int'}, + '5': {'name': '', + 'type': 'int'}, + '9': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '32': {'name': '', + 'type': 'int'}, + '48': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '23': {'name': '', + 'type': 'int'}, + '25': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'name': '', + 'type': 'bytes'}, + '8': {'name': '', + 'type': 'int'}, + '9': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '380': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '3': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '12': {'name': '', + 'type': 'bytes'}, + '2': {'name': '', + 'type': 'bytes'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'name': '', + 'type': 'int'}, + '5': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '6': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '5': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}, + '4': {'name': '', + 'type': 'int'}, + '41': {'name': '', + 'type': 'int'}, + '420': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '10': {'name': '', + 'type': 'bytes'}, + '2': {'name': '', + 'type': 'bytes'}, + '3': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '421': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '10': {'name': '', + 'type': 'bytes'}, + '2': {'name': '', + 'type': 'bytes'}, + '3': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '490': {'alt_typedefs': {'1': {'1': {'name': '', + 'type': 'bytes'}, + '15': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 
'type': 'bytes'}, + '4': {'name': '', + 'type': 'bytes'}, + '5': {'name': '', + 'type': 'int'}, + '9': {'name': '', + 'type': 'int'}}}, + 'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '15': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'message_typedef': {}, + 'name': '', + 'type': 'message'}, + '5': {'name': '', + 'type': 'int'}, + '9': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '493': {'message_typedef': {}, + 'name': '', + 'type': 'message'}, + '50': {'name': '', + 'type': 'int'}, + '530': {'name': '', + 'type': 'bytes'}, + '890': {'name': '', + 'type': 'int'}, + '91': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': 'country_full', + 'type': 'str'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'name': '', + 'type': 'bytes'}, + '5': {'name': '', + 'type': 'bytes'}, + '6': {'name': '', + 'type': 'int'}, + '7': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '93': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': 'city_full', + 'type': 'str'}}, + 'name': '', + 'type': 'message'}, + '930': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}}}, + 'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'alt_typedefs': {'1': {'1': {'name': '', + 'type': 'int'}}}, + 'message_typedef': {'1': {'message_typedef': {'1': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'message_typedef': {'1': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}, + '2': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '14': {'name': '', + 'type': 'int'}, + '18': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'int'}, + '4': {'name': '', + 'type': 'int'}, + '5': {'name': '', 
+ 'type': 'int'}, + '6': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '2': {'name': '', + 'type': 'int'}, + '20': {'name': '', + 'type': 'int'}, + '21': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'int'}, + '3': {'name': '', + 'type': 'int'}, + '4': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}, + '26': {'name': '', + 'type': 'bytes'}, + '27': {'message_typedef': {'1': {'name': '', + 'type': 'bytes'}, + '2': {'message_typedef': {'9': {'name': '', + 'type': 'fixed64'}}, + 'name': '', + 'type': 'message'}, + '3': {'name': '', + 'type': 'bytes'}, + '4': {'name': '', + 'type': 'int'}, + '5': {'message_typedef': {'1': {'name': '', + 'type': 'int'}, + '2': {'name': '', + 'type': 'bytes'}, + '3': {'name': '', + 'type': 'bytes'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}, + '3': {'name': '', + 'type': 'int'}, + '4': {'name': '', + 'type': 'bytes'}, + '7': {'name': '', + 'type': 'int'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}}, + 'name': '', + 'type': 'message'}, + '482': {'name': '', 'type': 'fixed64'}} + + values, types = blackboxprotobuf.decode_message(pb, types) + + user_name = values['2']['1'][2]['2-1']['2']['user_name'] + user_email = values['2']['1'][2]['2-1']['2']['user_email'] + user_phone = values['2']['1'][2]['2-1']['2']['100']['2'][0]['phone_number'] + user_id = values['2']['1'][2]['2-1']['2']['user_id'] + user_age = values['2']['1'][2]['2-1']['2']['user_age'] + user_birthdate = values['2']['1'][2]['2-1']['2']['user_birthdate'] + city = values['2']['1'][2]['2-1']['2']['93']['city_full'] + country = values['2']['1'][2]['2-1']['2']['91']['country_full'] + user_occupation = values['2']['1'][2]['2-1']['2']['490'][0]['4'] + user_education = 
values['2']['1'][2]['2-1']['2']['490'][1]['4'] + user_aboutme = values['2']['1'][2]['2-1']['2']['490'][3]['4'] + + data_list.append((user_name,user_email,user_phone,user_id,user_age,user_birthdate,city,country,user_occupation,user_education,user_aboutme)) + + if len(data_list) > 0: + report = ArtifactHtmlReport('Bumble - User Settings') + report.start_artifact_report(report_folder, 'Bumble - User Settings') + report.add_script() + data_headers = ('User Name','Email','Phone','ID','Age','Birthdate','City','Country','Occupation','Education','About') # Don't remove the comma, that is required to make this a tuple as there is only 1 element + + report.write_artifact_data_table(data_headers, data_list, source_file_settings) + report.end_artifact_report() + + tsvname = f'Bumble - User Settings' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Bumble - User Settings' + timeline(report_folder, tlactivity, data_list, data_headers) + else: + logfunc('No Bumble - User Settings data available') + + db = open_sqlite_db_readonly(chat_db) + cursor = db.cursor() + cursor.execute(''' + SELECT + datetime(message.created_timestamp/1000,'unixepoch') as 'Created Timestamp', + datetime(message.modified_timestamp/1000,'unixepoch') as 'Modified Timestamp', + message.sender_id, + message.recipient_id, + json_extract(message.payload, '$.text'), + json_extract(message.payload, '$.url'), + message.payload_type, + case message.is_incoming + when 0 then 'Outgoing' + when 1 then 'Incoming' + end as 'Message Direction', + message.conversation_id as 'Conversation ID', + message.id as 'Message ID', + conversation_info.user_name + from message + left join conversation_info on conversation_info.user_id = message.conversation_id + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Bumble - Chat Messages') + report.start_artifact_report(report_folder, 'Bumble - Chat Messages') + report.add_script() + 
data_headers = ('Created Timestamp','Modified Timestamp','Sender ID','Sender Name','Recipient ID','Recipient Name','Message Text','Message URL','Message Type','Message Direction','Conversation ID','Message ID') # Don't remove the comma, that is required to make this a tuple as there is only 1 element + data_list = [] + for row in all_rows: + + if row[7] == 'Outgoing': + data_list.append((row[0],row[1],row[2],str(user_name + ' (local user)'),row[3],row[10],row[4],row[5],row[6],row[7],row[8],row[9])) + else: + data_list.append((row[0],row[1],row[2],row[10],row[3],str(user_name + ' (local user)'),row[4],row[5],row[6],row[7],row[8],row[9])) + + report.write_artifact_data_table(data_headers, data_list, source_file_chat_db) + report.end_artifact_report() + + tsvname = f'Bumble - Chat Messages' + tsv(report_folder, data_headers, data_list, tsvname) + + else: + logfunc('No Bumble - Chat Messages data available') + + cursor.execute(''' + SELECT + user_name, + age, + gender, + case game_mode + when 0 then 'Bumble Date' + when 1 then 'Bumble Friends' + when 5 then 'Bumble Bizz' + end, + user_image_url, + user_id, + encrypted_user_id + FROM conversation_info + ORDER BY user_id + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Bumble - Matches') + report.start_artifact_report(report_folder, 'Bumble - Matches') + report.add_script() + data_headers = ('User Name','Age','Gender','Mode','Profile Image URL','User ID','Encrypted User ID') # Don't remove the comma, that is required to make this a tuple as there is only 1 element + data_list = [] + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6])) + + report.write_artifact_data_table(data_headers, data_list, source_file_chat_db) + report.end_artifact_report() + + tsvname = f'Bumble - Matches' + tsv(report_folder, data_headers, data_list, tsvname) + + else: + logfunc('No Bumble - Matches data available') + + db.close() + 
+__artifacts__ = { + "bumble": ( + "Bumble", + ('*/com.bumble.app/databases/ChatComDatabase*','*/com.bumble.app/files/c2V0dGluZ3M='), + get_bumble) +} \ No newline at end of file diff --git a/scripts/report.py b/scripts/report.py index 263c7748..fb2a49bb 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -41,6 +41,10 @@ def get_icon_name(category, artifact): elif artifact.find('BATTERY') >=0: icon = 'battery-charging' else: icon = 'bar-chart-2' elif category == 'BLUETOOTH CONNECTIONS': icon = 'bluetooth' + elif category == 'BUMBLE': + if artifact.find('USER SETTINGS') >= 0: icon = 'user' + if artifact.find('CHAT MESSAGES') >= 0: icon = 'message-circle' + if artifact.find('MATCHES') >= 0: icon = 'smile' elif category == 'CAST': icon = 'cast' elif category == 'FITBIT': icon = 'watch' elif category == 'CALL LOGS': icon = 'phone' From f2f5e30cd41457e65cffc954950f7fd7de4dbec2 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Mon, 7 Nov 2022 13:52:08 -0500 Subject: [PATCH 04/20] Version bump / contact details --- scripts/artifacts/bumble.py | 6 ++++++ scripts/version_info.py | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/scripts/artifacts/bumble.py b/scripts/artifacts/bumble.py index d6c5a660..341b595d 100644 --- a/scripts/artifacts/bumble.py +++ b/scripts/artifacts/bumble.py @@ -1,3 +1,9 @@ +# Module Description: Parses Bumble chats, matches and user details +# Author: @KevinPagano3 +# Date: 2022-11-07 +# Artifact version: 0.0.1 +# Requirements: none + import sqlite3 import os import shutil diff --git a/scripts/version_info.py b/scripts/version_info.py index 085d140d..de416554 100755 --- a/scripts/version_info.py +++ b/scripts/version_info.py @@ -1,4 +1,4 @@ -aleapp_version = '3.1.1' +aleapp_version = '3.1.2' # Contributors List # Format = [ Name, Blog-url, Twitter-handle, Github-url] From 79494de58bde1b5de42f4263551e4058aa4f13bb Mon Sep 17 00:00:00 2001 From: abrignoni Date: Sun, 13 Nov 2022 11:13:56 
-0500 Subject: [PATCH 05/20] Fix simple bugs --- scripts/artifacts/gmail.py | 2 +- scripts/artifacts/googleFitGMS.py | 82 ++++++++++++------------- scripts/artifacts/googlePlaySearches.py | 6 +- scripts/artifacts/pSettings.py | 6 +- scripts/artifacts/wifiConfigstore.py | 7 ++- 5 files changed, 57 insertions(+), 46 deletions(-) diff --git a/scripts/artifacts/gmail.py b/scripts/artifacts/gmail.py index 00822316..6ed6de37 100755 --- a/scripts/artifacts/gmail.py +++ b/scripts/artifacts/gmail.py @@ -37,7 +37,7 @@ def get_gmailActive(files_found, report_folder, seeker, wrap_text): report = ArtifactHtmlReport('Gmail - Active') report.start_artifact_report(report_folder, 'Gmail - Active') report.add_script() - data_headers = ('Active Gmail Address',) # final , needed for table formatting + data_headers = ('Active Gmail Address','') # final , needed for table formatting data_list = [] data_list.append((activeAccount, ''))# We only expect one active account report.write_artifact_data_table(data_headers, data_list, file_found) diff --git a/scripts/artifacts/googleFitGMS.py b/scripts/artifacts/googleFitGMS.py index 1469e8ba..4df3b68d 100755 --- a/scripts/artifacts/googleFitGMS.py +++ b/scripts/artifacts/googleFitGMS.py @@ -14,49 +14,49 @@ def get_googleFitGMS(files_found, report_folder, seeker, wrap_text): for file_found in files_found: file_found = str(file_found) - # if not file_found.endswith('fitness.db'): - # continue # Skip all other files + if file_found.endswith('fitness.db'): + break # Skip all other files - db = open_sqlite_db_readonly(file_found) - cursor = db.cursor() - cursor.execute(''' - SELECT - datetime(Sessions.start_time/1000,'unixepoch') AS "Activity Start Time", - datetime(Sessions.end_time/1000,'unixepoch') AS "Activity End Time", - Sessions.app_package AS "Contributing App", - CASE - WHEN Sessions.activity=7 THEN "Walking" - WHEN Sessions.activity=8 THEN "Running" - WHEN Sessions.activity=72 THEN "Sleeping" - ELSE Sessions.activity - END AS "Activity 
Type", - Sessions.name AS "Activity Name", - Sessions.description AS "Activity Description" - FROM - Sessions - ORDER BY "Activity Start Time" ASC - ''') + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + cursor.execute(''' + SELECT + datetime(Sessions.start_time/1000,'unixepoch') AS "Activity Start Time", + datetime(Sessions.end_time/1000,'unixepoch') AS "Activity End Time", + Sessions.app_package AS "Contributing App", + CASE + WHEN Sessions.activity=7 THEN "Walking" + WHEN Sessions.activity=8 THEN "Running" + WHEN Sessions.activity=72 THEN "Sleeping" + ELSE Sessions.activity + END AS "Activity Type", + Sessions.name AS "Activity Name", + Sessions.description AS "Activity Description" + FROM + Sessions + ORDER BY "Activity Start Time" ASC + ''') - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Google Fit (GMS)') - report.start_artifact_report(report_folder, 'Activity Sessions') - report.add_script() - data_headers = ('Activity Start Time','Activity End Time','Contributing App','Activity Type','Activity Name','Activity Description') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5])) - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Google Fit (GMS) - Activity Sessions' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Google Fit (GMS) - Activity Sessions' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Google Fit (GMS) - Activity Sessions data available') + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Google Fit (GMS)') + report.start_artifact_report(report_folder, 'Activity Sessions') + report.add_script() + data_headers = ('Activity Start Time','Activity End Time','Contributing App','Activity Type','Activity Name','Activity 
Description') + data_list = [] + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5])) + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'Google Fit (GMS) - Activity Sessions' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Google Fit (GMS) - Activity Sessions' + timeline(report_folder, tlactivity, data_list, data_headers) + else: + logfunc('No Google Fit (GMS) - Activity Sessions data available') __artifacts__ = { "GoogleFitGMS": ( diff --git a/scripts/artifacts/googlePlaySearches.py b/scripts/artifacts/googlePlaySearches.py index 1cb57ef0..1492c15e 100755 --- a/scripts/artifacts/googlePlaySearches.py +++ b/scripts/artifacts/googlePlaySearches.py @@ -6,7 +6,11 @@ def get_googlePlaySearches(files_found, report_folder, seeker, wrap_text): - file_found = str(files_found[0]) + for file_found in files_found: + file_found = str(file_found) + if file_found.endswith('suggestions.db'): + break # Skip all other files + db = open_sqlite_db_readonly(file_found) cursor = db.cursor() cursor.execute(''' diff --git a/scripts/artifacts/pSettings.py b/scripts/artifacts/pSettings.py index 590faf9e..9a9f7ad0 100755 --- a/scripts/artifacts/pSettings.py +++ b/scripts/artifacts/pSettings.py @@ -6,7 +6,11 @@ def get_pSettings(files_found, report_folder, seeker, wrap_text): - file_found = str(files_found[0]) + for file_found in files_found: + file_found = str(file_found) + if file_found.endswith('googlesettings.db'): + break# Skip all other files + db = open_sqlite_db_readonly(file_found) cursor = db.cursor() cursor.execute(''' diff --git a/scripts/artifacts/wifiConfigstore.py b/scripts/artifacts/wifiConfigstore.py index 0115bd80..fb0868b2 100755 --- a/scripts/artifacts/wifiConfigstore.py +++ b/scripts/artifacts/wifiConfigstore.py @@ -44,8 +44,11 @@ def get_wifiConfigstore(files_found, report_folder, seeker, wrap_text): logdevinfo(f'SSID: {splitted[1]}') if 
(elem.attrib.get('name')) == 'PreSharedKey': - splitted = elem.text.split('"') - logdevinfo(f'Pre-Shared Key: {splitted[1]}') + try: + splitted = elem.text.split('"') + logdevinfo(f'Pre-Shared Key ASCII: {splitted[1]}') + except: + logdevinfo(f'Pre-Shared Key 64 hex digits raw PSK: {elem.text}') if (elem.attrib.get('name')) == 'LastConnectedTime': timestamp = datetime.datetime.fromtimestamp(int(elem.attrib.get("value"))/1000).strftime('%Y-%m-%d %H:%M:%S.%f') From 5f58507caba9501da1d3ba4fb56175254438024a Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Wed, 30 Nov 2022 16:24:34 -0500 Subject: [PATCH 06/20] Mastodon Parser v1 --- scripts/artifacts/mastodon.py | 129 ++++++++++++++++++++++++++++++++++ scripts/report.py | 8 ++- 2 files changed, 135 insertions(+), 2 deletions(-) create mode 100644 scripts/artifacts/mastodon.py diff --git a/scripts/artifacts/mastodon.py b/scripts/artifacts/mastodon.py new file mode 100644 index 00000000..752a59fd --- /dev/null +++ b/scripts/artifacts/mastodon.py @@ -0,0 +1,129 @@ +import os +import sqlite3 +import textwrap + +from packaging import version +from scripts.artifact_report import ArtifactHtmlReport +from scripts.ilapfuncs import logfunc, tsv, timeline, kmlgen, is_platform_windows, open_sqlite_db_readonly + +def get_mastodon(files_found, report_folder, seeker, wrap_text): + + for file_found in files_found: + file_found = str(file_found) + if not file_found.endswith('.db'): + continue + + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + + cursor.execute(''' + select + datetime(time,'unixepoch'), + json_extract(recent_searches.json, '$.hashtag.name') as "Hashtag Name", + json_extract(recent_searches.json, '$.hashtag.url') as "Hashtag URL" + from recent_searches + where id like 'tag%' + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Mastodon - Hashtag Searches') + 
report.start_artifact_report(report_folder, 'Mastodon - Hashtag Searches') + report.add_script() + data_headers = ('Timestamp','Hashtag Name','Hashtag URL') + data_list = [] + for row in all_rows: + data_list.append((row[0],row[1],row[2])) + + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'Mastodon - Hashtag Searches' + tsv(report_folder, data_headers, data_list, tsvname) + + else: + logfunc('Mastodon - Hashtag Searches') + + cursor = db.cursor() + cursor.execute(''' + select + datetime(time,'unixepoch'), + json_extract(recent_searches.json, '$.account.username') as "Username", + json_extract(recent_searches.json, '$.account.display_name') as "Display Name", + json_extract(recent_searches.json, '$.account.url') as "URL", + json_extract(recent_searches.json, '$.account.id') as "ID" + from recent_searches + where id like 'acc%' + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Mastodon - Account Searches') + report.start_artifact_report(report_folder, 'Mastodon - Account Searches') + report.add_script() + data_headers = ('Timestamp','Username','Display Name','URL','ID') + data_list = [] + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3],row[4])) + + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'Mastodon - Account Searches' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Mastodon - Account Searches' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('Mastodon - Account Searches') + + cursor = db.cursor() + cursor.execute(''' + select + json_extract(notifications_all.json, '$.status.created_at') as "Timestamp", + json_extract(notifications_all.json, '$.account.acct') as "Notification From", + case type + when 2 then "Reply" + when 4 then "Favorite" + end, + 
json_extract(notifications_all.json, '$.status.url') as "Reference URL", + json_extract(notifications_all.json, '$.status.visibility') as "Visibility", + json_extract(notifications_all.json, '$.status.content') as "Content", + id + from notifications_all + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Mastodon - Notifications') + report.start_artifact_report(report_folder, 'Mastodon - Notifications') + report.add_script() + data_headers = ('Timestamp','Notification From','Notification Type','Reference URL','Visibility','Content','ID') + data_list = [] + for row in all_rows: + + notification_timestamp = row[0].replace('T', ' ').replace('Z', '') + + data_list.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],row[6])) + + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'Mastodon - Notifications' + tsv(report_folder, data_headers, data_list, tsvname) + + else: + logfunc('Mastodon - Notifications data available') + + db.close() + +__artifacts__ = { + "mastodon": ( + "Mastodon", + ('*/org.joinmastodon.android/databases/*.db*'), + get_mastodon) +} diff --git a/scripts/report.py b/scripts/report.py index fb2a49bb..21fa89cd 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -118,12 +118,16 @@ def get_icon_name(category, artifact): elif artifact.find('CHAT INFORMATION') >= 0: icon = 'message-circle' elif category == 'HIDEX': icon = 'eye-off' elif category == 'INSTALLED APPS': icon = 'package' + elif category == 'MASTODON': + if artifact.find('HASHTAG SEARCHES') >= 0: icon = 'hash' + elif artifact.find('ACCOUNT SEARCHES') >= 0: icon = 'user' + elif artifact.find('NOTIFICATIONS') >= 0: icon = 'bell' elif category == 'MEDIA METADATA': icon = 'file-plus' + elif category == 'MEGA': icon = 'message-circle' + elif category == 'MEWE': icon = 'message-circle' elif category == 'MY FILES': if artifact.find('MY FILES DB - 
CACHE MEDIA') >=0: icon = 'image' else: icon = 'file-plus' - elif category == 'MEGA': icon = 'message-circle' - elif category == 'MEWE': icon = 'message-circle' elif category == 'NOW PLAYING': icon = 'music' elif category == 'POWER EVENTS': if artifact.find('POWER OFF RESET'): icon = 'power' From 7c5039e9447edee614a65058ee553018ab83d839 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Wed, 7 Dec 2022 16:22:24 -0500 Subject: [PATCH 07/20] Updates --- scripts/artifacts/Turbo_Battery.py | 2 - scripts/artifacts/mastodon.py | 424 +++++++++++++++++++++-------- scripts/report.py | 7 +- scripts/version_info.py | 2 +- 4 files changed, 324 insertions(+), 111 deletions(-) diff --git a/scripts/artifacts/Turbo_Battery.py b/scripts/artifacts/Turbo_Battery.py index 4eded5d3..de4e39f4 100755 --- a/scripts/artifacts/Turbo_Battery.py +++ b/scripts/artifacts/Turbo_Battery.py @@ -1,8 +1,6 @@ import sqlite3 import io -import json import os -import shutil import textwrap from packaging import version diff --git a/scripts/artifacts/mastodon.py b/scripts/artifacts/mastodon.py index 752a59fd..12b5e49b 100644 --- a/scripts/artifacts/mastodon.py +++ b/scripts/artifacts/mastodon.py @@ -1,129 +1,341 @@ +# Module Description: Parses Mastodon timeline, notifications and searches +# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) +# Date: 2022-12-07 +# Artifact version: 0.0.1 +# Requirements: BeautifulSoup + +import datetime +import json import os import sqlite3 import textwrap +from bs4 import BeautifulSoup from packaging import version from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, timeline, kmlgen, is_platform_windows, open_sqlite_db_readonly +from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly def get_mastodon(files_found, report_folder, seeker, wrap_text): + account_db = '' + account_json = '' + source_file_account_db = '' + 
source_file_account_json = '' + for file_found in files_found: - file_found = str(file_found) - if not file_found.endswith('.db'): - continue + file_name = str(file_found) + + if file_name.lower().endswith('.db'): + accout_db = str(file_found) + source_file_account_db = file_found.replace(seeker.directory, '') + + if file_name.lower().endswith('accounts.json'): + account_json = str(file_found) + source_file_account_json = file_found.replace(seeker.directory, '') - db = open_sqlite_db_readonly(file_found) - cursor = db.cursor() - - cursor.execute(''' - select - datetime(time,'unixepoch'), - json_extract(recent_searches.json, '$.hashtag.name') as "Hashtag Name", - json_extract(recent_searches.json, '$.hashtag.url') as "Hashtag URL" - from recent_searches - where id like 'tag%' - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Mastodon - Hashtag Searches') - report.start_artifact_report(report_folder, 'Mastodon - Hashtag Searches') - report.add_script() - data_headers = ('Timestamp','Hashtag Name','Hashtag URL') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Mastodon - Hashtag Searches' - tsv(report_folder, data_headers, data_list, tsvname) - - else: - logfunc('Mastodon - Hashtag Searches') - - cursor = db.cursor() - cursor.execute(''' - select - datetime(time,'unixepoch'), - json_extract(recent_searches.json, '$.account.username') as "Username", - json_extract(recent_searches.json, '$.account.display_name') as "Display Name", - json_extract(recent_searches.json, '$.account.url') as "URL", - json_extract(recent_searches.json, '$.account.id') as "ID" - from recent_searches - where id like 'acc%' - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Mastodon - Account 
Searches') - report.start_artifact_report(report_folder, 'Mastodon - Account Searches') - report.add_script() - data_headers = ('Timestamp','Username','Display Name','URL','ID') - data_list = [] - for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'Mastodon - Account Searches' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Mastodon - Account Searches' - timeline(report_folder, tlactivity, data_list, data_headers) - - else: - logfunc('Mastodon - Account Searches') - - cursor = db.cursor() - cursor.execute(''' - select - json_extract(notifications_all.json, '$.status.created_at') as "Timestamp", - json_extract(notifications_all.json, '$.account.acct') as "Notification From", - case type - when 2 then "Reply" - when 4 then "Favorite" - end, - json_extract(notifications_all.json, '$.status.url') as "Reference URL", - json_extract(notifications_all.json, '$.status.visibility') as "Visibility", - json_extract(notifications_all.json, '$.status.content') as "Content", - id - from notifications_all - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Mastodon - Notifications') - report.start_artifact_report(report_folder, 'Mastodon - Notifications') - report.add_script() - data_headers = ('Timestamp','Notification From','Notification Type','Reference URL','Visibility','Content','ID') - data_list = [] - for row in all_rows: + if file_name.lower().endswith('.json') and file_name.lower().startswith('instance'): + instance_json = str(file_found) + source_file_instance_json = file_found.replace(seeker.directory, '') + + #if not file_found.endswith('.db'): + #continue + + db = open_sqlite_db_readonly(accout_db) + cursor = db.cursor() + + #Get Mastodon user search details for hashtags + cursor.execute(''' + select + 
datetime(time,'unixepoch') as "Search Timestamp", + json_extract(recent_searches.json, '$.hashtag.name') as "Hashtag Name", + json_extract(recent_searches.json, '$.hashtag.url') as "Hashtag URL" + from recent_searches + where id like 'tag%' + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Hashtag searches from the current user in Mastodon' + report = ArtifactHtmlReport('Mastodon - Hashtag Searches') + report.start_artifact_report(report_folder, 'Mastodon - Hashtag Searches') + report.add_script() + data_headers = ('Timestamp','Hashtag Name','Hashtag URL') + data_list = [] + for row in all_rows: + data_list.append((row[0],row[1],row[2])) + + report.write_artifact_data_table(data_headers, data_list, source_file_account_db) + report.end_artifact_report() + + tsvname = f'Mastodon - Hashtag Searches' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Mastodon - Hashtag Searches' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('Mastodon - Hashtag Searches') + + #Get Mastodon user search details for accounts + cursor = db.cursor() + cursor.execute(''' + select + datetime(time,'unixepoch') as "Search Timestamp", + json_extract(recent_searches.json, '$.account.username') as "Username", + json_extract(recent_searches.json, '$.account.display_name') as "Display Name", + json_extract(recent_searches.json, '$.account.url') as "URL", + json_extract(recent_searches.json, '$.account.id') as "ID" + from recent_searches + where id like 'acc%' + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Account searches from the current user in Mastodon' + report = ArtifactHtmlReport('Mastodon - Account Searches') + report.start_artifact_report(report_folder, 'Mastodon - Account Searches') + report.add_script() + data_headers = ('Timestamp','Username','Display Name','URL','ID') + data_list = [] + for row in all_rows: 
+ data_list.append((row[0],row[1],row[2],row[3],row[4])) + + report.write_artifact_data_table(data_headers, data_list, source_file_account_db) + report.end_artifact_report() + + tsvname = f'Mastodon - Account Searches' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Mastodon - Account Searches' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('Mastodon - Account Searches') + + #Get Mastodon user notification details + cursor = db.cursor() + cursor.execute(''' + select + json_extract(notifications_all.json, '$.created_at') as "Notification Created Timestamp", + json_extract(notifications_all.json, '$.account.acct') as "Notification From", + case type + when 0 then "Follow" + when 2 then "Reply" + when 3 then "Boost" + when 4 then "Favorite" + end as "Notification Type", + json_extract(notifications_all.json, '$.status.url') as "Reference URL", + json_extract(notifications_all.json, '$.status.content') as "Text Content", + json_extract(notifications_all.json, '$.status.visibility') as "Visibility", + json_extract(notifications_all.json, '$.status.created_at') as "Status Created Timestamp", + id + from notifications_all + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Notification details for the current user in Mastodon' + report = ArtifactHtmlReport('Mastodon - Notifications') + report.start_artifact_report(report_folder, 'Mastodon - Notifications') + report.add_script() + data_headers = ('Notification Created Timestamp','Notification From','Notification Type','Reference URL','Text Content','Visibility','Status Created Timestamp','ID') + data_list = [] + data_list_stripped = [] + for row in all_rows: - notification_timestamp = row[0].replace('T', ' ').replace('Z', '') + notification_timestamp = '' + status_timestamp = '' + + notification_timestamp = row[0].replace('T', ' ').replace('Z', '') + if row[6] != None: + status_timestamp = 
row[6].replace('T', ' ').replace('Z', '') + else: + status_timestamp = '' + + data_list.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],status_timestamp,row[7])) - data_list.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],row[6])) + if row[4] != None: + soup = BeautifulSoup(row[4], 'html.parser').text + data_list_stripped.append((notification_timestamp,row[1],row[2],row[3],soup,row[5],status_timestamp,row[7])) + else: + data_list_stripped.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],status_timestamp,row[7])) + + report.write_artifact_data_table(data_headers, data_list, source_file_account_db, html_no_escape=['Text Content']) + report.end_artifact_report() + + tsvname = f'Mastodon - Notifications' + tsv(report_folder, data_headers, data_list_stripped, tsvname) + + tlactivity = f'Mastodon - Notifications' + timeline(report_folder, tlactivity, data_list_stripped, data_headers) + + else: + logfunc('Mastodon - Notifications data available') + + #Get Mastodon user timeline details + cursor = db.cursor() + cursor.execute(''' + select + json_extract(home_timeline.json, '$.created_at') as "Created Timestamp", + json_extract(home_timeline.json, '$.account.acct') as "Account Name", + json_extract(home_timeline.json, '$.application.name') as "App Name", + json_extract(home_timeline.json, '$.content') as "Content", + json_extract(home_timeline.json, '$.url') as "URL", + json_extract(home_timeline.json, '$.reblog.account.acct') as "Boosted Account Name", + json_extract(home_timeline.json, '$.reblog.content') as "Boosted Content", + json_extract(home_timeline.json, '$.reblog.url') as "Boosted URL", + json_extract(home_timeline.json, '$.replies_count') as "Replies Count", + json_extract(home_timeline.json, '$.reblogs_count') as "Boosted Count", + json_extract(home_timeline.json, '$.favourites_count') as "Favorites Count", + json_extract(home_timeline.json, '$.visibility') as "Visibility", + id + from home_timeline + ''') - 
report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Timeline details for the current users feed in Mastodon' + report = ArtifactHtmlReport('Mastodon - Timeline') + report.start_artifact_report(report_folder, 'Mastodon - Timeline') + report.add_script() + data_headers = ('Timestamp','Account Name','App Name','Text Content','URL','Boosted Account Name','Boosted Content','Boosted URL','Replies Count','Boosted Count','Favorites Count','Visibility','ID') + data_list = [] + data_list_stripped = [] + for row in all_rows: + + notification_timestamp = row[0].replace('T', ' ').replace('Z', '') + + data_list.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) - tsvname = f'Mastodon - Notifications' - tsv(report_folder, data_headers, data_list, tsvname) + #if str(row[3]) and str(row[6]) == '': + if row[3] and row[6] != None: + data_list_stripped.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) + + else: + soup1 = '' + soup2 = '' + if str(row[3]).startswith('
<p>
'): + soup1 = BeautifulSoup(row[3], 'html.parser').text + elif str(row[6]).startswith('
<p>
'): + soup2 = BeautifulSoup(row[6], 'html.parser').text + + data_list_stripped.append((notification_timestamp,row[1],row[2],soup1,row[4],soup2,row[6],row[7],row[8],row[9],row[10],row[11],row[12])) + + report.write_artifact_data_table(data_headers, data_list, source_file_account_db, html_no_escape=['Text Content','Boosted Content']) + report.end_artifact_report() + + tsvname = f'Mastodon - Timeline' + tsv(report_folder, data_headers, data_list_stripped, tsvname) + + tlactivity = f'Mastodon - Timeline' + timeline(report_folder, tlactivity, data_list_stripped, data_headers) + + else: + logfunc('Mastodon - Timeline data available') + + db.close() + + #Get Mastodon account user details + with open(account_json, encoding = 'utf-8', mode = 'r') as f: + data = json.loads(f.read()) + data_list_json = [] + + for x in data['accounts']: + account_created_ts = str(x['b'].get('created_at','')).replace('T', ' ').replace('Z', '') + account_name = x['b'].get('acct','') + account_username = x['b'].get('username','') + account_display_name = x['b'].get('display_name','') + account_url = x['b'].get('url','') + account_id = x['b'].get('id','') + followers_count = x['b'].get('followers_count','') + following_count = x['b'].get('following_count','') + account_bio = x['b'].get('note','') + if str(account_bio).startswith('
<p>
'): + account_bio = BeautifulSoup(account_bio, 'html.parser').text - else: - logfunc('Mastodon - Notifications data available') + account_avatar = x['b'].get('avatar','') + instance_name = x.get('c','') + + alert_favorite = str(x['j']['alerts'].get('favourite','')).title() + alert_follow = str(x['j']['alerts'].get('follow','')).title() + alert_mention = str(x['j']['alerts'].get('mention','')).title() + alert_poll = str(x['j']['alerts'].get('poll','')).title() + alert_reblog = str(x['j']['alerts'].get('reblog','')).title() + + flag_bot = str(x['b'].get('bot','')).title() + flag_discoverable = str(x['b'].get('discoverable','')).title() + flag_locked = str(x['b'].get('locked','')).title() + flag_suspended = str(x['b'].get('suspended','')).title() + + data_list_json.append((account_created_ts,account_name,account_username,account_display_name,account_url,account_id,followers_count,following_count,account_bio,account_avatar,instance_name,alert_favorite,alert_follow,alert_mention,alert_poll,alert_reblog,flag_bot,flag_discoverable,flag_locked,flag_suspended)) + + num_entries = len(data_list_json) + if num_entries > 0: + description = 'Account details for the current Mastodon user.' 
+ report = ArtifactHtmlReport('Mastodon - Account Details') + report.start_artifact_report(report_folder, 'Mastodon - Account Details', description) + report.add_script() + data_headers = ('Created Timestamp','Name','User Name','Display Name','URL','ID','Followers Count','Following Count','Bio','Avatar URL','Instance Name','Favorite Alerts','Follow Alerts','Mention Alerts','Poll Alerts','Boost Alerts','Is Bot','Is Discoverable','Is Locked','Is Suspended') - db.close() + report.write_artifact_data_table(data_headers, data_list_json, file_found) + report.end_artifact_report() + + tsvname = f'Mastodon - Account Details' + tsv(report_folder, data_headers, data_list_json, tsvname) + + tlactivity = f'Mastodon - Account Details' + timeline(report_folder, tlactivity, data_list_json, data_headers) + else: + logfunc('No Mastodon - Account Details data available') + + + #Get Mastodon instance details + with open(file_found, encoding = 'utf-8', mode = 'r') as f: + data = json.loads(f.read()) + + data_list_instance_json = [] + + instance_updated_ts = datetime.datetime.utcfromtimestamp(int(data.get('last_updated',''))/1000).strftime('%Y-%m-%d %H:%M:%S') + instance_uri = data['instance'].get('uri','') + instance_title = data['instance'].get('title','') + instance_description = data['instance'].get('description','') + instance_version = data['instance'].get('version','') + instance_user_count = data['instance']['stats'].get('user_count','') + instance_status_count = data['instance']['stats'].get('status_count','') + instance_invites = str(data['instance'].get('invites_enabled','')).title() + instance_registrations = str(data['instance'].get('registrations','')).title() + instance_email = data['instance'].get('email','') + instance_contact = data['instance']['contact_account'].get('url','') + instance_thumbnail = data['instance'].get('thumbnail','') + + 
data_list_instance_json.append((instance_updated_ts,instance_uri,instance_title,instance_description,instance_version,instance_user_count,instance_status_count,instance_invites,instance_registrations,instance_email,instance_contact,instance_thumbnail)) + + num_entries = len(data_list_instance_json) + if num_entries > 0: + description = 'Details for the instance of Mastodon the user has joined.' + report = ArtifactHtmlReport('Mastodon - Instance Details') + report.start_artifact_report(report_folder, 'Mastodon - Instance Details', description) + report.add_script() + data_headers = ('Last Updated Timestamp','URI','Title','Description','Version','User Count','Status Count','Invites Enable','Registrations Enabled','Admin Contact Email','Owner','Thumbnail') + + report.write_artifact_data_table(data_headers, data_list_instance_json, file_found) + report.end_artifact_report() + + tsvname = f'Mastodon - Instance Details' + tsv(report_folder, data_headers, data_list_instance_json, tsvname) + + tlactivity = f'Mastodon - Instance Details' + timeline(report_folder, tlactivity, data_list_instance_json, data_headers) + else: + logfunc('No Mastodon - Instance Details data available') __artifacts__ = { "mastodon": ( "Mastodon", - ('*/org.joinmastodon.android/databases/*.db*'), + ('*/org.joinmastodon.android/databases/*.db*','*/org.joinmastodon.android/files/*.json'), get_mastodon) } diff --git a/scripts/report.py b/scripts/report.py index 21fa89cd..b7e8de13 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -119,9 +119,12 @@ def get_icon_name(category, artifact): elif category == 'HIDEX': icon = 'eye-off' elif category == 'INSTALLED APPS': icon = 'package' elif category == 'MASTODON': - if artifact.find('HASHTAG SEARCHES') >= 0: icon = 'hash' - elif artifact.find('ACCOUNT SEARCHES') >= 0: icon = 'user' + if artifact.find('ACCOUNT DETAILS') >= 0: icon = 'user' + elif artifact.find('ACCOUNT SEARCHES') >= 0: icon = 'users' + elif artifact.find('HASHTAG SEARCHES') >= 0: icon = 
'hash' + elif artifact.find('INSTANCE DETAILS') >= 0: icon = 'info' elif artifact.find('NOTIFICATIONS') >= 0: icon = 'bell' + elif artifact.find('TIMELINE') >= 0: icon = 'activity' elif category == 'MEDIA METADATA': icon = 'file-plus' elif category == 'MEGA': icon = 'message-circle' elif category == 'MEWE': icon = 'message-circle' diff --git a/scripts/version_info.py b/scripts/version_info.py index de416554..ceb9cb46 100755 --- a/scripts/version_info.py +++ b/scripts/version_info.py @@ -1,4 +1,4 @@ -aleapp_version = '3.1.2' +aleapp_version = '3.1.3' # Contributors List # Format = [ Name, Blog-url, Twitter-handle, Github-url] From d119961861cfc69daef678a20e03b7fee5659df2 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Mon, 12 Dec 2022 16:38:39 -0500 Subject: [PATCH 08/20] Tusky Parser --- scripts/artifacts/mastodon.py | 8 +- scripts/artifacts/tusky.py | 185 ++++++++++++++++++++++++++++++++++ scripts/report.py | 3 + scripts/version_info.py | 2 +- 4 files changed, 191 insertions(+), 7 deletions(-) create mode 100644 scripts/artifacts/tusky.py diff --git a/scripts/artifacts/mastodon.py b/scripts/artifacts/mastodon.py index 12b5e49b..ff87a823 100644 --- a/scripts/artifacts/mastodon.py +++ b/scripts/artifacts/mastodon.py @@ -1,7 +1,7 @@ # Module Description: Parses Mastodon timeline, notifications and searches # Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) # Date: 2022-12-07 -# Artifact version: 0.0.1 +# Artifact version: 0.0.2 # Requirements: BeautifulSoup import datetime @@ -36,9 +36,6 @@ def get_mastodon(files_found, report_folder, seeker, wrap_text): if file_name.lower().endswith('.json') and file_name.lower().startswith('instance'): instance_json = str(file_found) source_file_instance_json = file_found.replace(seeker.directory, '') - - #if not file_found.endswith('.db'): - #continue db = open_sqlite_db_readonly(accout_db) cursor = db.cursor() @@ -122,7 +119,7 @@ def get_mastodon(files_found, 
report_folder, seeker, wrap_text): json_extract(notifications_all.json, '$.account.acct') as "Notification From", case type when 0 then "Follow" - when 2 then "Reply" + when 2 then "Mention" when 3 then "Boost" when 4 then "Favorite" end as "Notification Type", @@ -211,7 +208,6 @@ def get_mastodon(files_found, report_folder, seeker, wrap_text): data_list.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) - #if str(row[3]) and str(row[6]) == '': if row[3] and row[6] != None: data_list_stripped.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) diff --git a/scripts/artifacts/tusky.py b/scripts/artifacts/tusky.py new file mode 100644 index 00000000..13ee0bbc --- /dev/null +++ b/scripts/artifacts/tusky.py @@ -0,0 +1,185 @@ +# Module Description: Parses Tusky timeline, notifications and searches +# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) +# Date: 2022-12-12 +# Artifact version: 0.0.1 +# Requirements: BeautifulSoup + +import datetime +import json +import os +import sqlite3 +import textwrap +from bs4 import BeautifulSoup + +from packaging import version +from scripts.artifact_report import ArtifactHtmlReport +from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly + +def get_tusky(files_found, report_folder, seeker, wrap_text): + + for file_found in files_found: + file_name = str(file_found) + + if not os.path.basename(file_name) == 'tuskyDB': # skip -journal and other files + continue + + db = open_sqlite_db_readonly(file_name) + + #Get Tusky user timeline details + cursor = db.cursor() + cursor.execute(''' + select + datetime(TimelineStatusEntity.createdAt/1000,'unixepoch'), + TimelineAccountEntity.username, + TimelineAccountEntity.displayName, + TimelineStatusEntity.url, + TimelineStatusEntity.content, + TimelineStatusEntity.attachments, + 
TimelineStatusEntity.reblogsCount, + TimelineStatusEntity.favouritesCount, + TimelineStatusEntity.repliesCount, + case TimelineStatusEntity.reblogged + when 0 then "" + when 1 then "True" + end, + case TimelineStatusEntity.bookmarked + when 0 then "" + when 1 then "True" + end, + case TimelineStatusEntity.favourited + when 0 then "" + when 1 then "True" + end, + case TimelineStatusEntity.sensitive + when 0 then "" + when 1 then "True" + end, + case TimelineStatusEntity.visibility + when 0 then "Unknown" + when 1 then "Public" + when 4 then "Direct" + end as "Visibility", + json_extract(TimelineStatusEntity.application, '$.name') as "Application" + from TimelineStatusEntity + left join TimelineAccountEntity on TimelineAccountEntity.serverId = TimelineStatusEntity.authorServerId + where TimelineStatusEntity.createdAt > 0 + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Timeline details for the current users feed in Tusky' + report = ArtifactHtmlReport('Tusky - Timeline') + report.start_artifact_report(report_folder, 'Tusky - Timeline') + report.add_script() + data_headers = ('Timestamp','User Name','Display Name','URL','Text Content','Attachments','Boost Count','Favorite Count','Replies Count','User Boosted?','User Bookmarked?','User Favorited?','Sensitive','Visibility','Application') + data_list = [] + data_list_stripped = [] + for row in all_rows: + data = json.loads(row[5]) + attachment_list = [] + attachments = '' + #Iterate attachments + for x in data: + attachment_item = x.get('url','') + attachment_list.append(attachment_item) + if len(attachment_list) > 0: + attachments = '\n'.join(map(str,attachment_list)) + else: + attachments = '' + data_list.append((row[0],row[1],row[2],row[3],row[4],attachments,row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13],row[14])) + + soup = '' + if str(row[4]).startswith('
<p>
'): + soup = BeautifulSoup(row[3], 'html.parser').text + + data_list_stripped.append((row[0],row[1],row[2],row[3],soup,attachment_list,row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13],row[14])) + + report.write_artifact_data_table(data_headers, data_list, file_found, html_no_escape=['Text Content']) + report.end_artifact_report() + + tsvname = f'Tusky - Timeline' + tsv(report_folder, data_headers, data_list_stripped, tsvname) + + tlactivity = f'Tusky - Timeline' + timeline(report_folder, tlactivity, data_list_stripped, data_headers) + + else: + logfunc('Tusky - Timeline data available') + + cursor = db.cursor() + cursor.execute(''' + select + accountId, + displayName, + username, + domain, + profilePictureUrl, + case notificationsEnabled + when 0 then "False" + when 1 then "True" + end, + case notificationsMentioned + when 0 then "False" + when 1 then "True" + end, + case notificationsFollowed + when 0 then "False" + when 1 then "True" + end, + case notificationsFollowRequested + when 0 then "False" + when 1 then "True" + end, + case notificationsReblogged + when 0 then "False" + when 1 then "True" + end, + case notificationsFavorited + when 0 then "False" + when 1 then "True" + end, + case notificationsPolls + when 0 then "False" + when 1 then "True" + end, + tabPreferences, + accessToken, + clientId, + clientSecret + from AccountEntity + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Account details for the current user in Tusky' + report = ArtifactHtmlReport('Tusky - Account Details') + report.start_artifact_report(report_folder, 'Tusky - Account Details') + report.add_script() + data_headers = ('Account ID','Display Name','User Name','Instance','Avatar URL','Notifications Enabled','Mentioned Notifications','Followed Notifications','Follow Requested Notifications','Boost Notifications','Favorited Notifications','Polls Notifications','Tab Preferences','Access Token','Client ID','Client 
Secret') + data_list = [] + for row in all_rows: + + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13],row[14],row[15])) + + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'Tusky - Account Details' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Tusky - Account Details' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('Tusky - Account Details data available') + + db.close() + +__artifacts__ = { + "Tusky": ( + "Tusky", + ('*/com.keylesspalace.tusky/databases/tuskyDB*'), + get_tusky) +} diff --git a/scripts/report.py b/scripts/report.py index b7e8de13..49171af3 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -164,6 +164,9 @@ def get_icon_name(category, artifact): elif artifact == 'TEAMS ACTIVITY FEED': icon = 'at-sign' elif artifact == 'TEAMS FILE INFO': icon = 'file' else: icon = 'file-text' + elif category == 'TUSKY': + if artifact.find('TIMELINE') >=0: icon = 'activity' + elif artifact.find('ACCOUNT') >=0: icon = 'user' elif category == 'VIBER': if artifact == 'VIBER - CONTACTS': icon = 'user' elif artifact == 'VIBER - MESSAGES': icon = 'message-square' diff --git a/scripts/version_info.py b/scripts/version_info.py index ceb9cb46..b558af33 100755 --- a/scripts/version_info.py +++ b/scripts/version_info.py @@ -1,4 +1,4 @@ -aleapp_version = '3.1.3' +aleapp_version = '3.1.4' # Contributors List # Format = [ Name, Blog-url, Twitter-handle, Github-url] From 480e584a4851aefd8d3f14b30588713c702e31d9 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Tue, 13 Dec 2022 10:08:55 -0500 Subject: [PATCH 09/20] Update mastodon.py adds attachment URLs --- scripts/artifacts/mastodon.py | 23 +++++++++++++++++------ 1 file changed, 17 insertions(+), 6 deletions(-) diff --git a/scripts/artifacts/mastodon.py b/scripts/artifacts/mastodon.py 
index ff87a823..b8b4277a 100644 --- a/scripts/artifacts/mastodon.py +++ b/scripts/artifacts/mastodon.py @@ -1,7 +1,7 @@ # Module Description: Parses Mastodon timeline, notifications and searches # Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) # Date: 2022-12-07 -# Artifact version: 0.0.2 +# Artifact version: 0.0.3 # Requirements: BeautifulSoup import datetime @@ -188,7 +188,8 @@ def get_mastodon(files_found, report_folder, seeker, wrap_text): json_extract(home_timeline.json, '$.reblogs_count') as "Boosted Count", json_extract(home_timeline.json, '$.favourites_count') as "Favorites Count", json_extract(home_timeline.json, '$.visibility') as "Visibility", - id + id, + json from home_timeline ''') @@ -199,17 +200,27 @@ def get_mastodon(files_found, report_folder, seeker, wrap_text): report = ArtifactHtmlReport('Mastodon - Timeline') report.start_artifact_report(report_folder, 'Mastodon - Timeline') report.add_script() - data_headers = ('Timestamp','Account Name','App Name','Text Content','URL','Boosted Account Name','Boosted Content','Boosted URL','Replies Count','Boosted Count','Favorites Count','Visibility','ID') + data_headers = ('Timestamp','Account Name','App Name','Text Content','Attachment URL','URL','Boosted Account Name','Boosted Content','Boosted URL','Replies Count','Boosted Count','Favorites Count','Visibility','ID') data_list = [] data_list_stripped = [] for row in all_rows: + data = json.loads(row[13]) + attachment_list = [] + attachments = '' + for x in data['media_attachments']: + attachment_item = x.get('url','') + attachment_list.append(attachment_item) + if len(attachment_list) > 0: + attachments = '\n'.join(map(str,attachment_list)) + else: + attachments = '' notification_timestamp = row[0].replace('T', ' ').replace('Z', '') - data_list.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) + 
data_list.append((notification_timestamp,row[1],row[2],row[3],attachments,row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) if row[3] and row[6] != None: - data_list_stripped.append((notification_timestamp,row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) + data_list_stripped.append((notification_timestamp,row[1],row[2],row[3],attachment_list,row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12])) else: soup1 = '' @@ -219,7 +230,7 @@ def get_mastodon(files_found, report_folder, seeker, wrap_text): elif str(row[6]).startswith('

'): soup2 = BeautifulSoup(row[6], 'html.parser').text - data_list_stripped.append((notification_timestamp,row[1],row[2],soup1,row[4],soup2,row[6],row[7],row[8],row[9],row[10],row[11],row[12])) + data_list_stripped.append((notification_timestamp,row[1],row[2],soup1,attachment_list,row[4],soup2,row[6],row[7],row[8],row[9],row[10],row[11],row[12])) report.write_artifact_data_table(data_headers, data_list, source_file_account_db, html_no_escape=['Text Content','Boosted Content']) report.end_artifact_report() From 1fa4706701341d7db1f6b075282bea9dfda17c6a Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Tue, 13 Dec 2022 14:24:18 -0500 Subject: [PATCH 10/20] SimpleStorage - App Launch --- scripts/artifacts/SimpleStorage_applaunch.py | 76 ++++++++++++++++++++ scripts/report.py | 2 + 2 files changed, 78 insertions(+) create mode 100644 scripts/artifacts/SimpleStorage_applaunch.py diff --git a/scripts/artifacts/SimpleStorage_applaunch.py b/scripts/artifacts/SimpleStorage_applaunch.py new file mode 100644 index 00000000..828d11f9 --- /dev/null +++ b/scripts/artifacts/SimpleStorage_applaunch.py @@ -0,0 +1,76 @@ +# Module Description: Parses SimpleStorage for application launch +# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) +# Date: 2022-12-13 +# Artifact version: 0.0.1 +# Much thanks to Josh Hickman (@josh_hickman1) for the research, testing and query + +import os +import sqlite3 +import textwrap + +from packaging import version +from scripts.artifact_report import ArtifactHtmlReport +from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly + +def get_SimpleStorage_applaunch(files_found, report_folder, seeker, wrap_text): + + for file_found in files_found: + file_name = str(file_found) + + if not os.path.basename(file_name) == 'SimpleStorage': # skip -journal and other files + continue + + db = open_sqlite_db_readonly(file_name) + + cursor = db.cursor() + 
cursor.execute(''' + SELECT DISTINCT + datetime(EchoAppLaunchMetricsEvents.timestampMillis/1000,'unixepoch') AS "Time App Launched", + EchoAppLaunchMetricsEvents.packageName AS "App", + CASE + WHEN EchoAppLaunchMetricsEvents.launchLocationId=1 THEN "Home Screen" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=2 THEN "Suggested Apps (Home Screen)" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=4 THEN "App Drawer" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=7 THEN "Suggested Apps (App Drawer)" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=8 THEN "Search (Top of App Drawer/GSB)" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=12 THEN "Recent Apps/Multi-Tasking Menu" + WHEN EchoAppLaunchMetricsEvents.launchLocationId=1000 THEN "Notification" + ELSE EchoAppLaunchMetricsEvents.launchLocationId + END AS "Launched From" + FROM EchoAppLaunchMetricsEvents + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = '' + report = ArtifactHtmlReport('SimpleStorage - App Launch') + report.start_artifact_report(report_folder, 'SimpleStorage - App Launch') + report.add_script() + data_headers = ('App Launched Timestamp','App Name','Launched From') + data_list = [] + data_list_stripped = [] + for row in all_rows: + + data_list.append((row[0],row[1],row[2])) + + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'SimpleStorage - App Launch' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'SimpleStorage - App Launch' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('SimpleStorage - App Launch data available') + + db.close() + +__artifacts__ = { + "SimpleStorage_applaunch": ( + "Android System Intelligence", + ('*/com.google.android.as/databases/SimpleStorage*'), + get_SimpleStorage_applaunch) +} diff --git a/scripts/report.py b/scripts/report.py index 49171af3..a05bc090 100755 --- 
a/scripts/report.py +++ b/scripts/report.py @@ -22,6 +22,8 @@ def get_icon_name(category, artifact): else: icon = 'user' elif category == 'ADB HOSTS': icon = 'terminal' elif category == 'AIRTAGS': icon = 'map-pin' + elif category == 'ANDROID SYSTEM INTELLIGENCE': + if artifact.find('SIMPLESTORAGE') >=0: icon = 'loader' elif category == 'FIREBASE CLOUD MESSAGING': icon = 'database' elif category == 'BURNER': if artifact.find('NUMBER INFORMATION') >= 0: icon = 'user' From 6a4d423be0efee0420108886deffe36deaf5183e Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Sat, 17 Dec 2022 11:37:22 -0500 Subject: [PATCH 11/20] Update googleQuickSearchboxRecent.py --- .../artifacts/googleQuickSearchboxRecent.py | 49 +++++++++++++++---- 1 file changed, 40 insertions(+), 9 deletions(-) diff --git a/scripts/artifacts/googleQuickSearchboxRecent.py b/scripts/artifacts/googleQuickSearchboxRecent.py index 1389fb5a..3211a799 100755 --- a/scripts/artifacts/googleQuickSearchboxRecent.py +++ b/scripts/artifacts/googleQuickSearchboxRecent.py @@ -1,11 +1,12 @@ import blackboxprotobuf +import datetime import json import os import shutil import sqlite3 from html import escape from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, is_platform_windows +from scripts.ilapfuncs import logfunc, tsv, is_platform_windows, open_sqlite_db_readonly is_windows = is_platform_windows() slash = '\\' if is_windows else '/' @@ -29,9 +30,34 @@ def recursive_convert_bytes_to_str(obj): def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text): recents = [] + account_db = '' + account_name = '' + screenshot_path = '' for file_found in files_found: file_found = str(file_found) - if file_found.endswith('.jpg'): + if file_found.endswith('accounts.notifications.db'): + account_db = str(file_found) + + db = open_sqlite_db_readonly(account_db) + cursor = db.cursor() + + cursor.execute(''' + select + account_name + from 
accounts''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + for row in all_rows: + account_name = row[0] + + db.close() + + continue + + elif file_found.endswith('.jpg'): + screenshot_path = file_found continue # Skip jpg files, all others should be protobuf elif file_found.find('{0}mirror{0}'.format(slash)) >= 0: # Skip sbin/.magisk/mirror/data/.. , it should be duplicate data @@ -39,6 +65,8 @@ def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text): elif os.path.isdir(file_found): # skip folders continue + + with open(file_found, 'rb') as f: pb = f.read() types = {'1': {'type': 'message', 'message_typedef': @@ -64,6 +92,7 @@ def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text): }, 'name': ''} } values, types = blackboxprotobuf.decode_message(pb, types) items = values.get('1', None) + if items: if isinstance(items, dict): # this means only one element was found @@ -83,23 +112,25 @@ def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text): report = ArtifactHtmlReport('Google Now & Quick Search recent events') report.start_artifact_report(report_folder, 'Recent Searches & Google Now', description) report.add_script() - data_headers = ('Screenshot', 'Protobuf Data') + data_headers = ('Timestamp','Screenshot Path','Search Query','Screenshot', 'Protobuf Data') data_list = [] for file_path, items in recents: - dir_path, base_name = os.path.split(file_path) + dir_path, base_name = os.path.split(screenshot_path) for item in items: - screenshot_id = str(item.get('screenshot-id', '')) - screenshot_file_path = os.path.join(dir_path, f'{base_name}-{screenshot_id}.jpg') + screenshot_id = str(item.get('screenshot-id', '')) + search_timestamp = datetime.datetime.utcfromtimestamp(item.get('timestamp1', '')/1000).strftime('%Y-%m-%d %H:%M:%S') + search_query = str(item.get('search-query','')) + screenshot_file_path = os.path.join(dir_path, f'{account_name}-{screenshot_id}.jpg') if 
os.path.exists(screenshot_file_path): shutil.copy2(screenshot_file_path, report_folder) - img_html = ''.format(f'{base_name}-{screenshot_id}.jpg', folder_name) + img_html = ''.format(f'{account_name}-{screenshot_id}.jpg', folder_name) platform = is_platform_windows() if platform: img_html = img_html.replace('?', '') recursive_convert_bytes_to_str(item) # convert all 'bytes' to str - data_list.append( (img_html, '

'+ escape(json.dumps(item, indent=4)).replace('\\n', '
') +'
') ) + data_list.append((search_timestamp,screenshot_file_path,search_query,img_html, '
'+ escape(json.dumps(item, indent=4)).replace('\\n', '
') +'
')) report.write_artifact_data_table(data_headers, data_list, dir_path, html_escape=False) report.end_artifact_report() @@ -112,6 +143,6 @@ def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text): __artifacts__ = { "Quicksearch_recent": ( "Google Now & QuickSearch", - ('*/com.google.android.googlequicksearchbox/files/recently/*'), + ('*/com.google.android.googlequicksearchbox/files/recently/*','*/com.google.android.googlequicksearchbox/files/accounts/*/RecentsDataStore.pb','*/com.google.android.googlequicksearchbox/databases/accounts.notifications.db'), get_quicksearch_recent) } \ No newline at end of file From 1513bdda805c21588164706bd302df55de1b221d Mon Sep 17 00:00:00 2001 From: abrignoni Date: Fri, 30 Dec 2022 23:59:42 -0500 Subject: [PATCH 12/20] gmm_storage.db parsing Pulls out GPS coordinates (to & from) for Google maps searches to include the google maps URL. --- scripts/artifacts/googleMapsGmm.py | 92 ++++++++++++++++++++++++++++++ scripts/report.py | 1 + 2 files changed, 93 insertions(+) create mode 100644 scripts/artifacts/googleMapsGmm.py diff --git a/scripts/artifacts/googleMapsGmm.py b/scripts/artifacts/googleMapsGmm.py new file mode 100644 index 00000000..9512ed0e --- /dev/null +++ b/scripts/artifacts/googleMapsGmm.py @@ -0,0 +1,92 @@ +import os +import sqlite3 +import struct +import datetime +from scripts.artifact_report import ArtifactHtmlReport +from scripts.ilapfuncs import logfunc, tsv, timeline, kmlgen, is_platform_windows, open_sqlite_db_readonly + +def get_googleMapsGmm(files_found, report_folder, seeker, wrap_text): + + data_list = [] + for file_found in files_found: + file_found = str(file_found) + if not file_found.endswith('gmm_storage.db'): + continue # Skip all other files + + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + cursor.execute(''' + select + rowid, + _data, + _key_pri + from gmm_storage_table + ''') + all_rows = cursor.fetchall() + + for row in all_rows: + id = row[0] + data = row[1] + 
keypri = row[2] + + idx=data.find(b"/dir/") + + if (idx!=-1): + length=struct.unpack("1 and len(dd.split("!2d"))>1: + tolon=dd.split("!1d")[1].split("!")[0] + tolat=dd.split("!2d")[1].split("!")[0] + idx=data.find(b"\x4C\x00\x01\x67\x74\x00\x12\x4C\x6A\x61\x76\x61\x2F\x6C\x61\x6E\x67\x2F\x53\x74\x72\x69\x6E\x67\x3B\x78\x70") + if (idx!=-1): + timestamp=struct.unpack(">Q",data[idx+0x1B:idx+0x1B+8])[0] + + if directions.startswith('b\''): + directions = directions.replace('b\'','', 1) + directions = directions[:-1] + + directions = ("https://google.com/maps"+directions) + directions = f'{directions}' + + data_list.append((directions, fromlat, fromlon, tolat, tolon, id, keypri)) + + + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Google Search History Maps') + report.start_artifact_report(report_folder, 'Google Search History Maps') + report.add_script() + data_headers = ('Directions', 'Latitude', 'Longitude', 'To Latitude', 'To Longitude', 'Row ID', 'Type') + report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False) + report.end_artifact_report() + + tsvname = f'Google Search History Maps' + tsv(report_folder, data_headers, data_list, tsvname) + + else: + logfunc('No Google Search History Maps data available') + + db.close() + +__artifacts__ = { + "gmm_maps": ( + "GEO Location", + ('*/data/com.google.android.apps.maps/databases/gmm_storage.db*'), + get_googleMapsGmm) +} \ No newline at end of file diff --git a/scripts/report.py b/scripts/report.py index fb2a49bb..88099b12 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -134,6 +134,7 @@ def get_icon_name(category, artifact): elif artifact.find('MESSAGES') >=0: icon = 'inbox' else: icon = 'mail' elif category == 'PROTONVPN': icon = 'shield' + elif category == 'GEO LOCATION': icon = 'map-pin' elif category == 'RCS CHATS': icon = 'message-circle' elif category == 'RECENT ACTIVITY': icon = 'activity' elif category == 'SAMSUNG SMARTTHINGS': icon 
= 'bluetooth' From 320ab9232507bd218bb8221b0c860ba631cb372c Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Tue, 3 Jan 2023 17:17:01 -0500 Subject: [PATCH 13/20] Gmail Labels --- scripts/artifacts/gmail_mail.py | 77 +++++++++++++++++++++++++++++++++ scripts/report.py | 4 +- 2 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 scripts/artifacts/gmail_mail.py diff --git a/scripts/artifacts/gmail_mail.py b/scripts/artifacts/gmail_mail.py new file mode 100644 index 00000000..7b88f7b2 --- /dev/null +++ b/scripts/artifacts/gmail_mail.py @@ -0,0 +1,77 @@ +# Module Description: Parses Gmail +# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) +# Date: 2023-01-03 +# Artifact version: 0.0.1 +# Requirements: BeautifulSoup + +import datetime +import json +import os +import sqlite3 +import textwrap +from bs4 import BeautifulSoup + +from packaging import version +from scripts.artifact_report import ArtifactHtmlReport +from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly + +def get_gmail_mail(files_found, report_folder, seeker, wrap_text): + + bigTopDataDB = '' + + for file_found in files_found: + file_found = str(file_found) + file_name = os.path.basename(file_found) + + if file_name.startswith('bigTopDataDB'): + + if file_name.endswith('-shm') or file_name.endswith('-wal'): + continue + + else: + bigTopDataDB = file_found + source_bigTop = file_found.replace(seeker.directory, '') + + db = open_sqlite_db_readonly(bigTopDataDB) + cursor = db.cursor() + + #Get Gmail label details + cursor.execute(''' + select + label_server_perm_id, + unread_count, + total_count, + unseen_count + from label_counts + order by label_server_perm_id + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Gmail mail labels' + report = ArtifactHtmlReport('Gmail - Label Details') + report.start_artifact_report(report_folder, 
'Gmail - Label Details') + report.add_script() + data_headers = ('Label','Unread Count','Total Count','Unseen Count') + data_list = [] + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3])) + + report.write_artifact_data_table(data_headers, data_list, source_bigTop) + report.end_artifact_report() + + tsvname = f'Gmail - Label Details' + tsv(report_folder, data_headers, data_list, tsvname) + + else: + logfunc('Gmail - Label Details') + + db.close() + +__artifacts__ = { + "GmailMail": ( + "Gmail", + ('*/com.google.android.gm/databases/bigTopDataDB.*'), + get_gmail_mail) +} diff --git a/scripts/report.py b/scripts/report.py index 7e82d9e9..3a294479 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -32,7 +32,9 @@ def get_icon_name(category, artifact): elif category == 'PLAYGROUND VAULT': icon = 'lock' elif category == 'ENCRYPTING MEDIA APPS': icon = 'lock' elif category == 'GOOGLE MAPS VOICE GUIDANCE': icon = 'map' - elif category == 'GMAIL': icon = 'at-sign' + elif category == 'GMAIL': + if artifact.find('ACTIVE') >= 0: icon = 'at-sign' + elif artifact.find('LABEL DETAILS') >= 0: icon = 'mail' elif category == 'APP INTERACTION': icon = 'bar-chart-2' elif category == 'PRIVACY DASHBOARD': icon = 'eye' elif category == 'BASH HISTORY': icon = 'terminal' From 6d4a852158c02d1934802c28211e14e97a700238 Mon Sep 17 00:00:00 2001 From: abrignoni Date: Wed, 4 Jan 2023 20:27:54 -0500 Subject: [PATCH 14/20] Create gmailEmails.py Parser for emails in the Gmail app --- scripts/artifacts/gmailEmails.py | 122 +++++++++++++++++++++++++++++++ 1 file changed, 122 insertions(+) create mode 100644 scripts/artifacts/gmailEmails.py diff --git a/scripts/artifacts/gmailEmails.py b/scripts/artifacts/gmailEmails.py new file mode 100644 index 00000000..0b69f071 --- /dev/null +++ b/scripts/artifacts/gmailEmails.py @@ -0,0 +1,122 @@ +import zlib +import sqlite3 +import blackboxprotobuf +import os +from datetime import datetime + +from scripts.artifact_report import 
ArtifactHtmlReport +from scripts.ilapfuncs import timeline, tsv, is_platform_windows, open_sqlite_db_readonly, media_to_html + + +def get_gmailEmails(files_found, report_folder, seeker, wrap_text): + + for file_found in files_found: + file_found = str(file_found) + + if file_found.endswith('-wal'): + continue + elif file_found.endswith('-shm'): + continue + elif os.path.basename(file_found).startswith('.'): + continue + elif os.path.basename(file_found).startswith('bigTopDataDB'): + break + + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + cursor.execute(''' + select * + from item_messages + left join item_message_attachments on item_messages.row_id = item_message_attachments.item_messages_row_id + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + filename = file_found + data_list = [] + + if usageentries > 0: + for row in all_rows: + id = row[7] + serverid = row[1] + attachname = row[15] + attachhash = row[16] + attachment = '' + + data = id + arreglo = bytearray(data) + arreglo = arreglo[1:] + decompressed_data = zlib.decompress(arreglo) + message,typedef = blackboxprotobuf.decode_message(decompressed_data) + + timestamp = (datetime.utcfromtimestamp(message['17']/1000)) + to = (message['1']['2'].decode()) #receiver + toname = (message['1'].get('3','')) #receiver name + if isinstance(toname, bytes): + toname = toname.decode() + + replyto = (message['11'].get('17', '')) #reply email + if isinstance(replyto, bytes): + replyto = replyto.decode() + else: + replyto = '' + + replytoname = (message['11'].get('15', '')) #reply name + if isinstance(replytoname, bytes): + replytoname = replytoname.decode() + else: + replytoname = '' + + subjectline = (message['5'].decode()) #Subject line + + if isinstance(message['6']['2'], list): + for x in message['6']['2']: + messagehtml = messagehtml + (x['3']['2'].decode()) + else: + messagehtml = (message['6']['2']['3']['2'].decode()) #HTML message + + mailedby = (message['11']['8'].decode()) 
#mailed by + signedby = (message['11'].get('9', '')) #signed by + if signedby != '': + signedby = signedby.decode() + + if attachname == 'noname': + attachname = '' + elif attachname is None: + attachname = '' + elif attachhash is None: + attachhash = '' + else: + for attachpath in files_found: + if attachhash in attachpath: + if attachpath.endswith(attachname): + #print(attachpath) + attachment = media_to_html(attachpath, files_found, report_folder) + + + + data_list.append((timestamp,serverid,messagehtml,attachment,attachname,to,toname,replyto,replytoname,subjectline,mailedby,signedby)) + + description = 'Gmail App Emails' + report = ArtifactHtmlReport('Gmail App Emails') + report.start_artifact_report(report_folder, 'Gmail App Emails', description) + report.add_script() + data_headers = ('Timestamp','Email ID','Message','Attachment','Attachment Name','To','To Name','Reply To','Reply To Name','Subject Line','Mailed By','Signed by') + report.write_artifact_data_table(data_headers, data_list, filename,html_escape=False) + report.end_artifact_report() + + tsvname = 'Gmail App Emails' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = 'Gmail App Emails' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('No Gmail App Emails data available') + +__artifacts__ = { + "Gmail": ( + "Gmail", + ('*/data/com.google.android.gm/databases/bigTopDataDB.*','*/data/com.google.android.gm/files/downloads/*/attachments/*/*.*'), + get_gmailEmails) +} \ No newline at end of file From ca74318f38e24cd311c72f641b61cffc59558761 Mon Sep 17 00:00:00 2001 From: abrignoni Date: Thu, 5 Jan 2023 14:52:03 -0500 Subject: [PATCH 15/20] Update gmailEmails.py Gmail app parser. 
--- scripts/artifacts/gmailEmails.py | 58 ++++++++++++++++++++++---------- 1 file changed, 41 insertions(+), 17 deletions(-) diff --git a/scripts/artifacts/gmailEmails.py b/scripts/artifacts/gmailEmails.py index 0b69f071..4ccc2772 100644 --- a/scripts/artifacts/gmailEmails.py +++ b/scripts/artifacts/gmailEmails.py @@ -5,7 +5,7 @@ from datetime import datetime from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import timeline, tsv, is_platform_windows, open_sqlite_db_readonly, media_to_html +from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly, media_to_html def get_gmailEmails(files_found, report_folder, seeker, wrap_text): @@ -50,11 +50,19 @@ def get_gmailEmails(files_found, report_folder, seeker, wrap_text): message,typedef = blackboxprotobuf.decode_message(decompressed_data) timestamp = (datetime.utcfromtimestamp(message['17']/1000)) - to = (message['1']['2'].decode()) #receiver - toname = (message['1'].get('3','')) #receiver name - if isinstance(toname, bytes): - toname = toname.decode() - + + to = (message.get('1', '')) #receiver + if to != '': + to = message['1'].get('2', '') + if isinstance(to, bytes): + to = to.decode() + + toname = (message.get('1', '')) #receiver name + if toname != '': + toname = message['1'].get('3', '') + if isinstance(toname, bytes): + toname = toname.decode() + replyto = (message['11'].get('17', '')) #reply email if isinstance(replyto, bytes): replyto = replyto.decode() @@ -67,18 +75,34 @@ def get_gmailEmails(files_found, report_folder, seeker, wrap_text): else: replytoname = '' - subjectline = (message['5'].decode()) #Subject line + subjectline = (message.get('5', '')) #Subject line + if subjectline != '': + if isinstance(subjectline, bytes): + subjectline = subjectline.decode() + else: + subjectline = '' - if isinstance(message['6']['2'], list): - for x in message['6']['2']: - messagehtml = messagehtml + (x['3']['2'].decode()) - else: - messagehtml = 
(message['6']['2']['3']['2'].decode()) #HTML message - - mailedby = (message['11']['8'].decode()) #mailed by - signedby = (message['11'].get('9', '')) #signed by - if signedby != '': - signedby = signedby.decode() + messagetest = message.get('6', '') + if messagetest != '': + messagetest = message['6'].get('2','') + if messagetest != '': + if isinstance(message['6']['2'], list): + for x in message['6']['2']: + messagehtml = messagehtml + (x['3']['2'].decode()) + else: + messagehtml = (message['6']['2']['3']['2'].decode()) #HTML message + + mailedbytest = message.get('11', '') + if mailedbytest != '': + mailedbytest = message['11'].get('8','') + if mailedbytest != '': + mailedby = (message['11']['8'].decode()) #mailed by + + signedbytest = message.get('11', '') + if signedbytest != '': + signedby = (message['11'].get('9', '')) #signed by + if signedby != '': + signedby = signedby.decode() if attachname == 'noname': attachname = '' From 10020042d335fd81f94d825f457c66c935ffe637 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Fri, 6 Jan 2023 12:00:50 -0500 Subject: [PATCH 16/20] Gmail Updates / Calendar parser --- scripts/artifacts/gmailEmails.py | 302 +++++++++++------- scripts/artifacts/gmail_mail.py | 77 ----- scripts/artifacts/googleCalendar.py | 140 ++++++++ .../artifacts/googleQuickSearchboxRecent.py | 2 - scripts/report.py | 5 + scripts/version_info.py | 2 +- 6 files changed, 341 insertions(+), 187 deletions(-) delete mode 100644 scripts/artifacts/gmail_mail.py create mode 100644 scripts/artifacts/googleCalendar.py diff --git a/scripts/artifacts/gmailEmails.py b/scripts/artifacts/gmailEmails.py index 4ccc2772..91806ab0 100644 --- a/scripts/artifacts/gmailEmails.py +++ b/scripts/artifacts/gmailEmails.py @@ -7,9 +7,13 @@ from scripts.artifact_report import ArtifactHtmlReport from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly, media_to_html - def get_gmailEmails(files_found, 
report_folder, seeker, wrap_text): + bigTopDataDB = '' + source_bigTop = '' + downloaderDB = '' + source_downloader = '' + for file_found in files_found: file_found = str(file_found) @@ -19,128 +23,212 @@ def get_gmailEmails(files_found, report_folder, seeker, wrap_text): continue elif os.path.basename(file_found).startswith('.'): continue - elif os.path.basename(file_found).startswith('bigTopDataDB'): - break + if os.path.basename(file_found).startswith('bigTopDataDB'): + bigTopDataDB = str(file_found) + source_bigTop = file_found.replace(seeker.directory, '') + if os.path.basename(file_found).startswith('downloader.db'): + downloaderDB = str(file_found) + source_downloader = file_found.replace(seeker.directory, '') - db = open_sqlite_db_readonly(file_found) - cursor = db.cursor() - cursor.execute(''' - select * - from item_messages - left join item_message_attachments on item_messages.row_id = item_message_attachments.item_messages_row_id - ''') + if bigTopDataDB != '': + db = open_sqlite_db_readonly(bigTopDataDB) + cursor = db.cursor() + cursor.execute(''' + select * + from item_messages + left join item_message_attachments on item_messages.row_id = item_message_attachments.item_messages_row_id + ''') - all_rows = cursor.fetchall() - usageentries = len(all_rows) - filename = file_found - data_list = [] - - if usageentries > 0: - for row in all_rows: - id = row[7] - serverid = row[1] - attachname = row[15] - attachhash = row[16] - attachment = '' - - data = id - arreglo = bytearray(data) - arreglo = arreglo[1:] - decompressed_data = zlib.decompress(arreglo) - message,typedef = blackboxprotobuf.decode_message(decompressed_data) - - timestamp = (datetime.utcfromtimestamp(message['17']/1000)) - - to = (message.get('1', '')) #receiver - if to != '': - to = message['1'].get('2', '') - if isinstance(to, bytes): - to = to.decode() - - toname = (message.get('1', '')) #receiver name - if toname != '': - toname = message['1'].get('3', '') - if isinstance(toname, bytes): - 
toname = toname.decode() - - replyto = (message['11'].get('17', '')) #reply email - if isinstance(replyto, bytes): - replyto = replyto.decode() - else: - replyto = '' + all_rows = cursor.fetchall() + usageentries = len(all_rows) + filename = file_found + data_list = [] + + if usageentries > 0: + for row in all_rows: + id = row[7] + serverid = row[1] + attachname = row[15] + attachhash = row[16] + attachment = '' - replytoname = (message['11'].get('15', '')) #reply name - if isinstance(replytoname, bytes): - replytoname = replytoname.decode() - else: - replytoname = '' - - subjectline = (message.get('5', '')) #Subject line - if subjectline != '': - if isinstance(subjectline, bytes): - subjectline = subjectline.decode() + data = id + arreglo = bytearray(data) + arreglo = arreglo[1:] + decompressed_data = zlib.decompress(arreglo) + message,typedef = blackboxprotobuf.decode_message(decompressed_data) + + timestamp = (datetime.utcfromtimestamp(message['17']/1000)) + + to = (message.get('1', '')) #receiver + if to != '': + to = message['1'].get('2', '') + if isinstance(to, bytes): + to = to.decode() + + toname = (message.get('1', '')) #receiver name + if toname != '': + toname = message['1'].get('3', '') + if isinstance(toname, bytes): + toname = toname.decode() + + replyto = (message['11'].get('17', '')) #reply email + if isinstance(replyto, bytes): + replyto = replyto.decode() else: - subjectline = '' - - messagetest = message.get('6', '') - if messagetest != '': - messagetest = message['6'].get('2','') - if messagetest != '': - if isinstance(message['6']['2'], list): - for x in message['6']['2']: - messagehtml = messagehtml + (x['3']['2'].decode()) + replyto = '' + + replytoname = (message['11'].get('15', '')) #reply name + if isinstance(replytoname, bytes): + replytoname = replytoname.decode() + else: + replytoname = '' + + subjectline = (message.get('5', '')) #Subject line + if subjectline != '': + if isinstance(subjectline, bytes): + subjectline = 
subjectline.decode() else: - messagehtml = (message['6']['2']['3']['2'].decode()) #HTML message - - mailedbytest = message.get('11', '') - if mailedbytest != '': - mailedbytest = message['11'].get('8','') + subjectline = '' + + messagetest = message.get('6', '') + if messagetest != '': + messagetest = message['6'].get('2','') + if messagetest != '': + if isinstance(message['6']['2'], list): + for x in message['6']['2']: + messagehtml = messagehtml + (x['3']['2'].decode()) + else: + messagehtml = (message['6']['2']['3']['2'].decode()) #HTML message + + mailedbytest = message.get('11', '') if mailedbytest != '': - mailedby = (message['11']['8'].decode()) #mailed by + mailedbytest = message['11'].get('8','') + if mailedbytest != '': + mailedby = (message['11']['8'].decode()) #mailed by + + signedbytest = message.get('11', '') + if signedbytest != '': + signedby = (message['11'].get('9', '')) #signed by + if signedby != '': + signedby = signedby.decode() + + if attachname == 'noname': + attachname = '' + elif attachname is None: + attachname = '' + elif attachhash is None: + attachhash = '' + else: + for attachpath in files_found: + if attachhash in attachpath: + if attachpath.endswith(attachname): + attachment = media_to_html(attachpath, files_found, report_folder) + + data_list.append((timestamp,serverid,messagehtml,attachment,attachname,to,toname,replyto,replytoname,subjectline,mailedby,signedby)) + + description = 'Gmail - App Emails' + report = ArtifactHtmlReport('Gmail - App Emails') + report.start_artifact_report(report_folder, 'Gmail - App Emails', description) + report.add_script() + data_headers = ('Timestamp','Email ID','Message','Attachment','Attachment Name','To','To Name','Reply To','Reply To Name','Subject Line','Mailed By','Signed by') + report.write_artifact_data_table(data_headers, data_list, source_bigTop,html_escape=False) + report.end_artifact_report() - signedbytest = message.get('11', '') - if signedbytest != '': - signedby = 
(message['11'].get('9', '')) #signed by - if signedby != '': - signedby = signedby.decode() + tsvname = 'Gmail - App Emails' + tsv(report_folder, data_headers, data_list, tsvname) - if attachname == 'noname': - attachname = '' - elif attachname is None: - attachname = '' - elif attachhash is None: - attachhash = '' - else: - for attachpath in files_found: - if attachhash in attachpath: - if attachpath.endswith(attachname): - #print(attachpath) - attachment = media_to_html(attachpath, files_found, report_folder) - + tlactivity = 'Gmail - App Emails' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('No Gmail - App Emails data available') + + cursor = db.cursor() + + #Get Gmail label details + cursor.execute(''' + select + label_server_perm_id, + unread_count, + total_count, + unseen_count + from label_counts + order by label_server_perm_id + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Gmail mail labels' + report = ArtifactHtmlReport('Gmail - Label Details') + report.start_artifact_report(report_folder, 'Gmail - Label Details') + report.add_script() + data_headers = ('Label','Unread Count','Total Count','Unseen Count') + data_list = [] + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3])) + + report.write_artifact_data_table(data_headers, data_list, source_bigTop) + report.end_artifact_report() + tsvname = f'Gmail - Label Details' + tsv(report_folder, data_headers, data_list, tsvname) - data_list.append((timestamp,serverid,messagehtml,attachment,attachname,to,toname,replyto,replytoname,subjectline,mailedby,signedby)) + else: + logfunc('No Gmail - Label Details data available' ) - description = 'Gmail App Emails' - report = ArtifactHtmlReport('Gmail App Emails') - report.start_artifact_report(report_folder, 'Gmail App Emails', description) - report.add_script() - data_headers = ('Timestamp','Email ID','Message','Attachment','Attachment Name','To','To 
Name','Reply To','Reply To Name','Subject Line','Mailed By','Signed by') - report.write_artifact_data_table(data_headers, data_list, filename,html_escape=False) - report.end_artifact_report() - - tsvname = 'Gmail App Emails' - tsv(report_folder, data_headers, data_list, tsvname) + db.close() - tlactivity = 'Gmail App Emails' - timeline(report_folder, tlactivity, data_list, data_headers) - + if downloaderDB != '': + db = open_sqlite_db_readonly(downloaderDB) + cursor = db.cursor() + + #Get Gmail download requests + cursor.execute(''' + select + datetime(request_time_ms/1000,'unixepoch'), + account_name, + type, + caller_id, + url, + target_file_path, + target_file_size, + priority + from download_requests + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + description = 'Gmail download requests' + report = ArtifactHtmlReport('Gmail - Download Requests') + report.start_artifact_report(report_folder, 'Gmail - Download Requests') + report.add_script() + data_headers = ('Timestamp Requested','Account Name','Download Type','Message ID','URL','Target File Path','Target File Size','Priority') + data_list = [] + for row in all_rows: + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7])) + + report.write_artifact_data_table(data_headers, data_list, source_downloader) + report.end_artifact_report() + + tsvname = f'Gmail - Download Requests' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Gmail - Download Requests' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('No Gmail - Download Requests data available') + + db.close() else: - logfunc('No Gmail App Emails data available') + logfunc('No Gmail - Download Requests data available') __artifacts__ = { "Gmail": ( "Gmail", - ('*/data/com.google.android.gm/databases/bigTopDataDB.*','*/data/com.google.android.gm/files/downloads/*/attachments/*/*.*'), + 
('*/data/com.google.android.gm/databases/bigTopDataDB.*','*/data/com.google.android.gm/files/downloads/*/attachments/*/*.*','*/data/com.google.android.gm/databases/downloader.db*'), get_gmailEmails) } \ No newline at end of file diff --git a/scripts/artifacts/gmail_mail.py b/scripts/artifacts/gmail_mail.py deleted file mode 100644 index 7b88f7b2..00000000 --- a/scripts/artifacts/gmail_mail.py +++ /dev/null @@ -1,77 +0,0 @@ -# Module Description: Parses Gmail -# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) -# Date: 2023-01-03 -# Artifact version: 0.0.1 -# Requirements: BeautifulSoup - -import datetime -import json -import os -import sqlite3 -import textwrap -from bs4 import BeautifulSoup - -from packaging import version -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly - -def get_gmail_mail(files_found, report_folder, seeker, wrap_text): - - bigTopDataDB = '' - - for file_found in files_found: - file_found = str(file_found) - file_name = os.path.basename(file_found) - - if file_name.startswith('bigTopDataDB'): - - if file_name.endswith('-shm') or file_name.endswith('-wal'): - continue - - else: - bigTopDataDB = file_found - source_bigTop = file_found.replace(seeker.directory, '') - - db = open_sqlite_db_readonly(bigTopDataDB) - cursor = db.cursor() - - #Get Gmail label details - cursor.execute(''' - select - label_server_perm_id, - unread_count, - total_count, - unseen_count - from label_counts - order by label_server_perm_id - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - description = 'Gmail mail labels' - report = ArtifactHtmlReport('Gmail - Label Details') - report.start_artifact_report(report_folder, 'Gmail - Label Details') - report.add_script() - data_headers = ('Label','Unread Count','Total Count','Unseen Count') - data_list = [] - for row in all_rows: - 
data_list.append((row[0],row[1],row[2],row[3])) - - report.write_artifact_data_table(data_headers, data_list, source_bigTop) - report.end_artifact_report() - - tsvname = f'Gmail - Label Details' - tsv(report_folder, data_headers, data_list, tsvname) - - else: - logfunc('Gmail - Label Details') - - db.close() - -__artifacts__ = { - "GmailMail": ( - "Gmail", - ('*/com.google.android.gm/databases/bigTopDataDB.*'), - get_gmail_mail) -} diff --git a/scripts/artifacts/googleCalendar.py b/scripts/artifacts/googleCalendar.py new file mode 100644 index 00000000..4816f92e --- /dev/null +++ b/scripts/artifacts/googleCalendar.py @@ -0,0 +1,140 @@ +# Module Description: Parses provider calendars and events +# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) +# Date: 2023-01-06 +# Artifact version: 0.0.1 + +import zlib +import sqlite3 +import blackboxprotobuf +import os +from datetime import datetime + +from scripts.artifact_report import ArtifactHtmlReport +from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly + +def get_calendar(files_found, report_folder, seeker, wrap_text): + + for file_found in files_found: + file_found = str(file_found) + if file_found.endswith('-wal'): + continue + elif file_found.endswith('-shm'): + continue + elif file_found.endswith('-journal'): + continue + if os.path.basename(file_found).endswith('calendar.db'): + calendarDB = file_found + source_calendarDB = file_found.replace(seeker.directory, '') + + if os.path.basename(file_found).endswith('cal_v2a'): + g_calendarDB = file_found + source_g_calendarDB = file_found.replace(seeker.directory, '') + + db = open_sqlite_db_readonly(calendarDB) + + #Get provider calendar events + cursor = db.cursor() + cursor.execute(''' + select + datetime(Events.dtstart/1000,'unixepoch') as "Event Start Timestamp", + datetime(Events.dtend/1000,'unixepoch') as "Event End Timestamp", + Events.eventTimezone, + Events.title, + Events.description, + 
Events.eventLocation, + Events._sync_id, + Events.organizer, + Calendars.calendar_displayName, + case Events.allDay + when 0 then '' + when 1 then 'Yes' + end, + case Events.hasAlarm + when 0 then '' + when 1 then 'Yes' + end + from Events + left join Calendars on Calendars._id = Events.calendar_id + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + data_list = [] + for row in all_rows: + + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10])) + + description = 'Calendar - Events' + report = ArtifactHtmlReport('Calendar - Events') + report.start_artifact_report(report_folder, 'Calendar - Events', description) + report.add_script() + data_headers = ('Event Start Timestamp','Event End Timestamp','Event Timezone','Title','Description','Event Location','Sync ID','Organizer','Calendar Display Name','All Day Event','Has Alarm') + report.write_artifact_data_table(data_headers, data_list, source_calendarDB,html_escape=False) + report.end_artifact_report() + + tsvname = 'Calendar - Events' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = 'Calendar - Events' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('No Calendar - Events data available') + + #Get provider calendars + cursor = db.cursor() + cursor.execute(''' + select + datetime(cal_sync8/1000,'unixepoch') as "Last Synced Timestamp", + name, + calendar_displayName, + account_name, + account_type, + case visible + when 0 then 'No' + when 1 then 'Yes' + end, + calendar_location, + calendar_timezone, + ownerAccount, + case isPrimary + when 0 then '' + when 1 then 'Yes' + end, + calendar_color, + calendar_color_index + from Calendars + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + data_list = [] + for row in all_rows: + + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11])) + + 
description = 'Calendar - Calendars' + report = ArtifactHtmlReport('Calendar - Calendars') + report.start_artifact_report(report_folder, 'Calendar - Calendars', description) + report.add_script() + data_headers = ('Created Timestamp','Calendar Name','Calendar Display Name','Account Name','Account Type','Visible','Calendar Location','Timezone','Owner Account','Is Primary','Color','Color Index') + report.write_artifact_data_table(data_headers, data_list, source_calendarDB,html_escape=False) + report.end_artifact_report() + + tsvname = 'Calendar - Calendars' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = 'Calendar - Calendars' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('No Calendar - Calendars data available') + +__artifacts__ = { + "Calendar": ( + "Calendar", + ('*/data/com.google.android.calendar/databases/cal_v2a*','*/com.android.providers.calendar/databases/calendar.db*'), + get_calendar) +} \ No newline at end of file diff --git a/scripts/artifacts/googleQuickSearchboxRecent.py b/scripts/artifacts/googleQuickSearchboxRecent.py index 3211a799..6c53f76d 100755 --- a/scripts/artifacts/googleQuickSearchboxRecent.py +++ b/scripts/artifacts/googleQuickSearchboxRecent.py @@ -65,8 +65,6 @@ def get_quicksearch_recent(files_found, report_folder, seeker, wrap_text): elif os.path.isdir(file_found): # skip folders continue - - with open(file_found, 'rb') as f: pb = f.read() types = {'1': {'type': 'message', 'message_typedef': diff --git a/scripts/report.py b/scripts/report.py index 3a294479..a44b0251 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -34,6 +34,8 @@ def get_icon_name(category, artifact): elif category == 'GOOGLE MAPS VOICE GUIDANCE': icon = 'map' elif category == 'GMAIL': if artifact.find('ACTIVE') >= 0: icon = 'at-sign' + elif artifact.find('APP EMAILS') >= 0: icon = 'at-sign' + elif artifact.find('DOWNLOAD REQUESTS') >= 0: icon = 'download-cloud' elif artifact.find('LABEL DETAILS') >= 0: 
icon = 'mail' elif category == 'APP INTERACTION': icon = 'bar-chart-2' elif category == 'PRIVACY DASHBOARD': icon = 'eye' @@ -49,6 +51,9 @@ def get_icon_name(category, artifact): if artifact.find('USER SETTINGS') >= 0: icon = 'user' if artifact.find('CHAT MESSAGES') >= 0: icon = 'message-circle' if artifact.find('MATCHES') >= 0: icon = 'smile' + elif category == 'CALENDAR': + if artifact.find('CALENDAR - EVENTS') >=0: icon = 'calendar' + else: icon = 'calendar' elif category == 'CAST': icon = 'cast' elif category == 'FITBIT': icon = 'watch' elif category == 'CALL LOGS': icon = 'phone' diff --git a/scripts/version_info.py b/scripts/version_info.py index b558af33..5657e081 100755 --- a/scripts/version_info.py +++ b/scripts/version_info.py @@ -1,4 +1,4 @@ -aleapp_version = '3.1.4' +aleapp_version = '3.1.5' # Contributors List # Format = [ Name, Blog-url, Twitter-handle, Github-url] From abff2070cf4fbdbdef0d4ce7679ac562788c2bf2 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Mon, 9 Jan 2023 14:19:45 -0500 Subject: [PATCH 17/20] Downloads parser --- scripts/artifacts/downloads.py | 79 +++++++++++++++++ scripts/report.py | 151 +++++++++++++++++---------------- 2 files changed, 155 insertions(+), 75 deletions(-) create mode 100644 scripts/artifacts/downloads.py diff --git a/scripts/artifacts/downloads.py b/scripts/artifacts/downloads.py new file mode 100644 index 00000000..5d0376bf --- /dev/null +++ b/scripts/artifacts/downloads.py @@ -0,0 +1,79 @@ +# Module Description: Parses native downloads database +# Author: @KevinPagano3 (Twitter) / stark4n6@infosec.exchange (Mastodon) +# Date: 2023-01-09 +# Artifact version: 0.0.1 + +import sqlite3 +import os +from datetime import datetime + +from scripts.artifact_report import ArtifactHtmlReport +from scripts.ilapfuncs import logfunc, timeline, tsv, is_platform_windows, open_sqlite_db_readonly + +def get_downloads(files_found, report_folder, seeker, wrap_text): + + for file_found in 
files_found: + file_found = str(file_found) + if not os.path.basename(file_found) == 'downloads.db': # skip -journal and other files + continue + + db = open_sqlite_db_readonly(file_found) + + #Get file downloads + cursor = db.cursor() + cursor.execute(''' + select + datetime(lastmod/1000,'unixepoch') as "Modified/Downloaded Timestamp", + title, + description, + uri, + _data, + mimetype, + notificationpackage, + current_bytes, + total_bytes, + status, + errorMsg, + etag, + case is_visible_in_downloads_ui + when 0 then 'No' + when 1 then 'Yes' + end, + case deleted + when 0 then '' + when 1 then 'Yes' + end + from downloads + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + data_list = [] + for row in all_rows: + + data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10],row[11],row[12],row[13])) + + description = 'Native downloads' + report = ArtifactHtmlReport('Native Downloads') + report.start_artifact_report(report_folder, 'Native Downloads', description) + report.add_script() + data_headers = ('Modified/Downloaded Timestamp','Title','Description','Provider URI','Save Location','Mime Type','App Provider Package','Current Bytes','Total Bytes','Status','Error Message','ETAG','Visible in Downloads UI','Deleted') + report.write_artifact_data_table(data_headers, data_list, file_found,html_escape=False) + report.end_artifact_report() + + tsvname = 'Native Downloads' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = 'Native Downloads' + timeline(report_folder, tlactivity, data_list, data_headers) + + else: + logfunc('No Native Downloads data available') + +__artifacts__ = { + "Downloads": ( + "Downloads", + ('*/data/com.android.providers.downloads/databases/downloads.db*'), + get_downloads) +} \ No newline at end of file diff --git a/scripts/report.py b/scripts/report.py index a44b0251..3429b83b 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -24,42 
+24,24 @@ def get_icon_name(category, artifact): elif category == 'AIRTAGS': icon = 'map-pin' elif category == 'ANDROID SYSTEM INTELLIGENCE': if artifact.find('SIMPLESTORAGE') >=0: icon = 'loader' - elif category == 'FIREBASE CLOUD MESSAGING': icon = 'database' - elif category == 'BURNER': - if artifact.find('NUMBER INFORMATION') >= 0: icon = 'user' - elif artifact.find('COMMUNICATION INFORMATION') >= 0: icon = 'message-circle' - elif category == 'CALCULATOR LOCKER': icon = 'lock' - elif category == 'PLAYGROUND VAULT': icon = 'lock' - elif category == 'ENCRYPTING MEDIA APPS': icon = 'lock' - elif category == 'GOOGLE MAPS VOICE GUIDANCE': icon = 'map' - elif category == 'GMAIL': - if artifact.find('ACTIVE') >= 0: icon = 'at-sign' - elif artifact.find('APP EMAILS') >= 0: icon = 'at-sign' - elif artifact.find('DOWNLOAD REQUESTS') >= 0: icon = 'download-cloud' - elif artifact.find('LABEL DETAILS') >= 0: icon = 'mail' elif category == 'APP INTERACTION': icon = 'bar-chart-2' - elif category == 'PRIVACY DASHBOARD': icon = 'eye' + elif category == 'APP ROLES': icon = 'tool' elif category == 'BASH HISTORY': icon = 'terminal' - elif category == 'SETTINGS SERVICES': - if artifact.find('BATTERY') >=0: icon = 'battery-charging' - elif category == 'DEVICE HEALTH SERVICES': - if artifact.find('BLUETOOTH') >=0: icon = 'bluetooth' - elif artifact.find('BATTERY') >=0: icon = 'battery-charging' - else: icon = 'bar-chart-2' elif category == 'BLUETOOTH CONNECTIONS': icon = 'bluetooth' elif category == 'BUMBLE': if artifact.find('USER SETTINGS') >= 0: icon = 'user' if artifact.find('CHAT MESSAGES') >= 0: icon = 'message-circle' if artifact.find('MATCHES') >= 0: icon = 'smile' + elif category == 'BURNER': + if artifact.find('NUMBER INFORMATION') >= 0: icon = 'user' + elif artifact.find('COMMUNICATION INFORMATION') >= 0: icon = 'message-circle' + elif category == 'CALCULATOR LOCKER': icon = 'lock' elif category == 'CALENDAR': if artifact.find('CALENDAR - EVENTS') >=0: icon = 'calendar' 
else: icon = 'calendar' - elif category == 'CAST': icon = 'cast' - elif category == 'FITBIT': icon = 'watch' elif category == 'CALL LOGS': icon = 'phone' - elif category == 'IMAGE MANAGER CACHE': icon = 'image' - elif category == 'CLIPBOARD': icon = 'clipboard' elif category == 'CASH APP': icon = 'credit-card' + elif category == 'CAST': icon = 'cast' elif category == 'CHATS': icon = 'message-circle' elif category == 'CHROMIUM': if artifact.find('AUTOFILL') >= 0: icon = 'edit-3' @@ -73,21 +55,27 @@ def get_icon_name(category, artifact): elif artifact.find('TOP SITES') >= 0: icon = 'list' elif artifact.find('WEB VISITS') >= 0: icon = 'globe' else: icon = 'chrome' + elif category == 'CLIPBOARD': icon = 'clipboard' + elif category == 'CONTACTS': icon = 'user' + elif category == 'DEVICE HEALTH SERVICES': + if artifact.find('BLUETOOTH') >=0: icon = 'bluetooth' + elif artifact.find('BATTERY') >=0: icon = 'battery-charging' + else: icon = 'bar-chart-2' elif category == 'DEVICE INFO': if artifact == 'BUILD INFO': icon = 'terminal' elif artifact == 'PARTNER SETTINGS': icon = 'settings' elif artifact.find('SETTINGS_SECURE_') >= 0: icon = 'settings' else: icon = 'info' - elif category == 'ETC HOSTS': icon = 'globe' - elif category == 'WIPE & SETUP': - if artifact == 'FACTORY RESET': icon = 'loader' - elif artifact == 'SUGGESTIONS.XML': icon = 'loader' - elif artifact == 'SETUP_WIZARD_INFO.XML': icon = 'loader' - elif artifact == 'APPOPS.XML': icon = 'loader' - elif artifact == 'SAMSUNG WIPE HISTORY': icon = 'trash-2' - else: icon = 'loader' + elif category == 'DOWNLOADS': icon = 'download' + elif category == 'DUCKDUCKGO': + if artifact == 'DUCKDUCKGO TAB THUMBNAILS': icon = 'image' + else: icon = 'layers' elif category == 'EMULATED STORAGE METADATA': icon = 'database' + elif category == 'ENCRYPTING MEDIA APPS': icon = 'lock' + elif category == 'ETC HOSTS': icon = 'globe' elif category == 'FACEBOOK MESSENGER': icon = 'facebook' + elif category == 'FILES BY GOOGLE': icon = 
'file' + elif category == 'FIREBASE CLOUD MESSAGING': icon = 'database' elif category == 'FIREFOX': if artifact.find('BOOKMARKS') >= 0: icon = 'bookmark' elif artifact.find('COOKIES') >= 0: icon = 'info' @@ -99,25 +87,31 @@ def get_icon_name(category, artifact): elif artifact.find('TOP SITES') >= 0: icon = 'list' elif artifact.find('VISITS') >= 0: icon = 'globe' elif artifact.find('WEB HISTORY') >= 0: icon = 'globe' + elif category == 'FITBIT': icon = 'watch' + elif category == 'GEO LOCATION': icon = 'map-pin' + elif category == 'GMAIL': + if artifact.find('ACTIVE') >= 0: icon = 'at-sign' + elif artifact.find('APP EMAILS') >= 0: icon = 'at-sign' + elif artifact.find('DOWNLOAD REQUESTS') >= 0: icon = 'download-cloud' + elif artifact.find('LABEL DETAILS') >= 0: icon = 'mail' + elif category == 'GOOGLE MAPS VOICE GUIDANCE': icon = 'map' elif category == 'GOOGLE CHAT': if artifact.find('GROUP INFORMATION') >= 0: icon = 'users' elif artifact.find('CHAT MESSAGES') >= 0: icon = 'message-circle' elif category == 'GOOGLE DRIVE': icon = 'file' - elif category == 'FILES BY GOOGLE': icon = 'file' elif category == 'GOOGLE DUO': if artifact.find('CALL HISTORY') >= 0: icon = 'phone-call' elif artifact.find('CONTACTS') >= 0: icon = 'users' elif artifact.find('NOTES') >= 0: icon = 'edit-3' elif category == 'GOOGLE FIT (GMS)': icon = 'activity' elif category == 'GOOGLE KEEP': icon = 'list' - elif category == 'TOR': icon = 'globe' elif category == 'GBOARD KEYBOARD': icon = 'edit-3' + elif category == 'GOOGLE MESSAGES': icon = 'message-circle' elif category == 'GOOGLE NOW & QUICKSEARCH': icon = 'search' elif category == 'GOOGLE PHOTOS': if artifact.find('LOCAL TRASH') >=0: icon = 'trash-2' elif artifact.find('BACKED UP FOLDER') >= 0: icon = 'refresh-cw' else: icon = 'image' - elif category == 'GOOGLE MESSAGES': icon = 'message-circle' elif category == 'GOOGLE PLAY': if artifact == 'GOOGLE PLAY SEARCHES': icon = 'search' else: icon = 'play' @@ -126,7 +120,15 @@ def 
get_icon_name(category, artifact): if artifact.find('GROUP INFORMATION') >= 0: icon = 'users' elif artifact.find('CHAT INFORMATION') >= 0: icon = 'message-circle' elif category == 'HIDEX': icon = 'eye-off' + elif category == 'IMAGE MANAGER CACHE': icon = 'image' + elif category == 'IMO': + if artifact == 'IMO - ACCOUNT ID': icon = 'user' + elif artifact == 'IMO - MESSAGES': icon = 'message-square' elif category == 'INSTALLED APPS': icon = 'package' + elif category == 'LINE': + if artifact == 'LINE - CONTACTS': icon = 'user' + elif artifact == 'LINE - MESSAGES': icon = 'message-square' + elif artifact == 'LINE - CALL LOGS': icon = 'phone' elif category == 'MASTODON': if artifact.find('ACCOUNT DETAILS') >= 0: icon = 'user' elif artifact.find('ACCOUNT SEARCHES') >= 0: icon = 'users' @@ -141,16 +143,18 @@ def get_icon_name(category, artifact): if artifact.find('MY FILES DB - CACHE MEDIA') >=0: icon = 'image' else: icon = 'file-plus' elif category == 'NOW PLAYING': icon = 'music' + elif category == 'PERMISSIONS': icon = 'check' + elif category == 'PLAYGROUND VAULT': icon = 'lock' elif category == 'POWER EVENTS': if artifact.find('POWER OFF RESET'): icon = 'power' elif artifact.find('LAST BOOT TIME'): icon = 'power' elif artifact.find('SHUTDOWN CHECKPOINTS'): icon = 'power' + elif category == 'PRIVACY DASHBOARD': icon = 'eye' elif category == 'PROTONMAIL': if artifact.find('CONTACTS') >=0: icon = 'users' elif artifact.find('MESSAGES') >=0: icon = 'inbox' else: icon = 'mail' elif category == 'PROTONVPN': icon = 'shield' - elif category == 'GEO LOCATION': icon = 'map-pin' elif category == 'RCS CHATS': icon = 'message-circle' elif category == 'RECENT ACTIVITY': icon = 'activity' elif category == 'SAMSUNG SMARTTHINGS': icon = 'bluetooth' @@ -160,13 +164,24 @@ def get_icon_name(category, artifact): else: icon = 'sun' elif category == 'SAMSUNG_CMH': icon = 'disc' elif category == 'SCRIPT LOGS': icon = 'archive' + elif category == 'SETTINGS SERVICES': + if 
artifact.find('BATTERY') >=0: icon = 'battery-charging' + elif category == 'SKOUT': + if artifact == 'SKOUT MESSAGES': icon = 'message-circle' + elif artifact == 'SKOUT USERS': icon = 'users' + elif category == 'SKYPE': + if artifact == 'SKYPE - CALL LOGS': icon = 'phone' + elif artifact == 'SKYPE - MESSAGES': icon = 'message-square' + elif artifact == 'SKYPE - CONTACTS': icon = 'user' elif category == 'SLOPES': if artifact == 'SLOPES - ACTIONS': icon = 'trending-down' elif artifact == 'SLOPES - LIFT DETAILS': icon = 'shuffle' elif artifact == 'SLOPES - RESORT DETAILS': icon = 'home' - elif category == 'SKOUT': - if artifact == 'SKOUT MESSAGES': icon = 'message-circle' - elif artifact == 'SKOUT USERS': icon = 'users' + elif category == 'SMS & MMS': icon = 'message-square' + elif category == 'SNAPCHAT': icon = 'bell' + elif category == 'SQLITE JOURNALING': icon = 'book-open' + elif category == 'TANGO': + if artifact == 'TANGO - MESSAGES': icon = 'message-square' elif category == 'TEAMS': if artifact == 'TEAMS MESSAGES': icon = 'message-circle' elif artifact == 'TEAMS USERS': icon = 'users' @@ -174,54 +189,33 @@ def get_icon_name(category, artifact): elif artifact == 'TEAMS ACTIVITY FEED': icon = 'at-sign' elif artifact == 'TEAMS FILE INFO': icon = 'file' else: icon = 'file-text' + elif category == 'TEXT NOW': + if artifact == 'TEXT NOW - CALL LOGS': icon = 'phone' + elif artifact == 'TEXT NOW - MESSAGES': icon = 'message-square' + elif artifact == 'TEXT NOW - CONTACTS': icon = 'user' + elif category == 'TIKTOK': + if artifact == 'TIKTOK - MESSAGES': icon = 'message-square' + elif artifact == 'TIKTOK - CONTACTS': icon = 'user' + elif category == 'TOR': icon = 'globe' elif category == 'TUSKY': if artifact.find('TIMELINE') >=0: icon = 'activity' elif artifact.find('ACCOUNT') >=0: icon = 'user' + elif category == 'USAGE STATS': icon = 'bar-chart-2' + elif category == 'USER DICTIONARY': icon = 'book' elif category == 'VIBER': if artifact == 'VIBER - CONTACTS': icon = 
'user' elif artifact == 'VIBER - MESSAGES': icon = 'message-square' elif artifact == 'VIBER - CALL LOGS': icon = 'phone' - elif category == 'SMS & MMS': icon = 'message-square' - elif category == 'SQLITE JOURNALING': icon = 'book-open' - elif category == 'USAGE STATS': icon = 'bar-chart-2' - elif category == 'USER DICTIONARY': icon = 'book' - elif category == 'WAZE': icon = 'navigation-2' - elif category == 'WELLBEING' or category == 'WELLBEING ACCOUNT': - if artifact == 'ACCOUNT DATA': icon = 'user' - else: icon = 'layers' - elif category == 'WIFI PROFILES': icon = 'wifi' - elif category == 'PERMISSIONS': icon = 'check' - elif category == 'APP ROLES': icon = 'tool' - elif category == 'DUCKDUCKGO': - if artifact == 'DUCKDUCKGO TAB THUMBNAILS': icon = 'image' - else: icon = 'layers' - elif category == 'LINE': - if artifact == 'LINE - CONTACTS': icon = 'user' - elif artifact == 'LINE - MESSAGES': icon = 'message-square' - elif artifact == 'LINE - CALL LOGS': icon = 'phone' - elif category == 'IMO': - if artifact == 'IMO - ACCOUNT ID': icon = 'user' - elif artifact == 'IMO - MESSAGES': icon = 'message-square' - elif category == 'TANGO': - if artifact == 'TANGO - MESSAGES': icon = 'message-square' elif category == 'VLC': if artifact == 'VLC MEDIA LIST': icon = 'film' elif artifact == 'VLC THUMBNAILS': icon = 'image' elif category == 'VLC THUMBS': if artifact == 'VLC MEDIA LIB': icon = 'film' elif artifact == 'VLC THUMBNAILS': icon = 'image' - elif category == 'SNAPCHAT': icon = 'bell' - elif category == 'SKYPE': - if artifact == 'SKYPE - CALL LOGS': icon = 'phone' - elif artifact == 'SKYPE - MESSAGES': icon = 'message-square' - elif artifact == 'SKYPE - CONTACTS': icon = 'user' - elif category == 'TEXT NOW': - if artifact == 'TEXT NOW - CALL LOGS': icon = 'phone' - elif artifact == 'TEXT NOW - MESSAGES': icon = 'message-square' - elif artifact == 'TEXT NOW - CONTACTS': icon = 'user' - elif category == 'TIKTOK': - if artifact == 'TIKTOK - MESSAGES': icon = 
'message-square' - elif artifact == 'TIKTOK - CONTACTS': icon = 'user' + elif category == 'WAZE': icon = 'navigation-2' + elif category == 'WELLBEING' or category == 'WELLBEING ACCOUNT': + if artifact == 'ACCOUNT DATA': icon = 'user' + else: icon = 'layers' elif category == 'WHATSAPP': if artifact == 'WHATSAPP - CONTACTS': icon = 'users' elif artifact == 'WHATSAPP - ONE TO ONE MESSAGES': icon = 'message-circle' @@ -230,7 +224,14 @@ def get_icon_name(category, artifact): elif artifact == 'WHATSAPP - GROUP DETAILS': icon = 'users' elif artifact == 'WHATSAPP - MESSAGES': icon = 'message-square' else: icon = 'user' - elif category == 'CONTACTS': icon = 'user' + elif category == 'WIFI PROFILES': icon = 'wifi' + elif category == 'WIPE & SETUP': + if artifact == 'FACTORY RESET': icon = 'loader' + elif artifact == 'SUGGESTIONS.XML': icon = 'loader' + elif artifact == 'SETUP_WIZARD_INFO.XML': icon = 'loader' + elif artifact == 'APPOPS.XML': icon = 'loader' + elif artifact == 'SAMSUNG WIPE HISTORY': icon = 'trash-2' + else: icon = 'loader' return icon def generate_report(reportfolderbase, time_in_secs, time_HMS, extraction_type, image_input_path): From a135960b49c04e5d96582f89f62249e28bb2b307 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Tue, 10 Jan 2023 15:30:38 -0500 Subject: [PATCH 18/20] Timeline and Bug Fixes --- scripts/artifacts/FacebookMessenger.py | 8 +- .../artifacts/FilesByGoogle_FilesMaster.py | 10 +- scripts/artifacts/WhatsApp.py | 6 - scripts/artifacts/airGuard.py | 6 +- scripts/artifacts/bumble.py | 2 - scripts/artifacts/clipBoard.py | 6 +- scripts/artifacts/contacts.py | 3 - scripts/artifacts/gboard.py | 14 +- scripts/artifacts/googleCalendar.py | 5 +- scripts/artifacts/mewe.py | 1 - scripts/artifacts/snapchat.py | 4 +- scripts/artifacts/swellbeing.py | 13 +- scripts/artifacts/wellbeing.py | 131 ++++++++++++------ scripts/artifacts/wellbeingURLs.py | 62 --------- scripts/artifacts/wellbeingaccount.py | 2 +- 
scripts/artifacts/wifiConfigstore.py | 4 +- scripts/report.py | 7 +- 17 files changed, 128 insertions(+), 156 deletions(-) delete mode 100755 scripts/artifacts/wellbeingURLs.py diff --git a/scripts/artifacts/FacebookMessenger.py b/scripts/artifacts/FacebookMessenger.py index 816987c2..925dc820 100755 --- a/scripts/artifacts/FacebookMessenger.py +++ b/scripts/artifacts/FacebookMessenger.py @@ -118,13 +118,13 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() - tsvname = f'Facebook Messenger{typeof}-- Chats{usernum}' + tsvname = f'Facebook Messenger{typeof}- Chats{usernum}' tsv(report_folder, data_headers, data_list, tsvname, source_file) - tlactivity = f'Facebook Messenger{typeof}-- Chats{usernum}' + tlactivity = f'Facebook Messenger{typeof}- Chats{usernum}' timeline(report_folder, tlactivity, data_list, data_headers) else: - logfunc(f'No Facebook{typeof} - Chats data available{usernum}') + logfunc(f'No Facebook{typeof}- Chats data available{usernum}') cursor.execute(''' select @@ -199,8 +199,6 @@ def get_FacebookMessenger(files_found, report_folder, seeker, wrap_text): tsvname = f'Facebook{typeof}- Contacts{usernum}' tsv(report_folder, data_headers, data_list, tsvname, source_file) - tlactivity = f'Facebook{typeof}- Contacts{usernum}' - timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc(f'No Facebook{typeof}- Contacts data available{usernum}') diff --git a/scripts/artifacts/FilesByGoogle_FilesMaster.py b/scripts/artifacts/FilesByGoogle_FilesMaster.py index 65ae7675..42169acd 100755 --- a/scripts/artifacts/FilesByGoogle_FilesMaster.py +++ b/scripts/artifacts/FilesByGoogle_FilesMaster.py @@ -15,14 +15,14 @@ def get_FilesByGoogle_FilesMaster(files_found, report_folder, seeker, wrap_text) cursor = db.cursor() cursor.execute(''' select - root_path, - root_relative_file_path, - file_name, - size, case file_date_modified_ms 
when 0 then '' else datetime(file_date_modified_ms/1000,'unixepoch') end as file_date_modified_ms, + root_path, + root_relative_file_path, + file_name, + size, mime_type, case media_type when 0 then 'App/Data' @@ -47,7 +47,7 @@ def get_FilesByGoogle_FilesMaster(files_found, report_folder, seeker, wrap_text) report = ArtifactHtmlReport('Files by Google - Files Master') report.start_artifact_report(report_folder, 'Files by Google - Files Master') report.add_script() - data_headers = ('Root Path','Root Relative Path','File Name','Size','Date Modified','Mime Type','Media Type','URI','Hidden','Title','Parent Folder') # Don't remove the comma, that is required to make this a tuple as there is only 1 element + data_headers = ('Date Modified','Root Path','Root Relative Path','File Name','Size','Mime Type','Media Type','URI','Hidden','Title','Parent Folder') # Don't remove the comma, that is required to make this a tuple as there is only 1 element data_list = [] for row in all_rows: data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7],row[8],row[9],row[10])) diff --git a/scripts/artifacts/WhatsApp.py b/scripts/artifacts/WhatsApp.py index aa6b7c0a..7d9c2832 100755 --- a/scripts/artifacts/WhatsApp.py +++ b/scripts/artifacts/WhatsApp.py @@ -312,7 +312,6 @@ def get_WhatsApp(files_found, report_folder, seeker, wrap_text): ELSE "" END AS "Sending Party JID", CASE - WHEN message.from_me=0 THEN "Incoming" WHEN message.from_me=1 THEN "Outgoing" END AS "Message Direction", @@ -380,9 +379,6 @@ def get_WhatsApp(files_found, report_folder, seeker, wrap_text): else: logfunc('No WhatsApp - Group Messages found') - - - try: cursor.execute(''' SELECT @@ -455,8 +451,6 @@ def get_WhatsApp(files_found, report_folder, seeker, wrap_text): tsvname = "WhatsApp - User Profile" tsv(report_folder, data_headers, data_list,tsvname) - tlactivity = "WhatsApp - User Profile" - timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc("No WhatsApp - Profile data 
found") diff --git a/scripts/artifacts/airGuard.py b/scripts/artifacts/airGuard.py index af718e66..94da7465 100755 --- a/scripts/artifacts/airGuard.py +++ b/scripts/artifacts/airGuard.py @@ -35,7 +35,11 @@ def get_airGuard(files_found, report_folder, seeker, wrap_text): data_headers_kml = ('Timestamp','Time (Local)','Device MAC Address','Latitude','Longitude','Signal Strength (RSSI)','First Time Device Seen','Last Time User Notified') data_list = [] for row in all_rows: - data_list.append((row[0],row[1],row[2],row[3],row[4],row[5],row[6],row[7])) + last_time_dev_seen = str(row[0]).replace("T", " ") + time_local = str(row[1]).replace("T", " ") + first_time_dev_seen = str(row[6]).replace("T", " ") + last_time_user_notified = str(row[7]).replace("T", " ") + data_list.append((last_time_dev_seen,time_local,row[2],row[3],row[4],row[5],first_time_dev_seen,last_time_user_notified)) report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() diff --git a/scripts/artifacts/bumble.py b/scripts/artifacts/bumble.py index 341b595d..558704df 100644 --- a/scripts/artifacts/bumble.py +++ b/scripts/artifacts/bumble.py @@ -550,8 +550,6 @@ def get_bumble(files_found, report_folder, seeker, wrap_text): tsvname = f'Bumble - User Settings' tsv(report_folder, data_headers, data_list, tsvname) - tlactivity = f'Bumble - User Settings' - timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No Bumble - User Settings data available') diff --git a/scripts/artifacts/clipBoard.py b/scripts/artifacts/clipBoard.py index 0243e70d..0c506204 100755 --- a/scripts/artifacts/clipBoard.py +++ b/scripts/artifacts/clipBoard.py @@ -44,14 +44,14 @@ def get_clipBoard(files_found, report_folder, seeker, wrap_text): path = file_found modtime = os.path.getmtime(file_found) modtime = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(modtime)) - data_list.append((thumb, modtime, path)) + data_list.append((modtime, thumb, path)) else: #print('Outside of 
Matching') path = file_found textdata = triage_text(file_found) modtime = os.path.getmtime(file_found) modtime = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(modtime)) - data_list.append((textdata, modtime, path)) + data_list.append((modtime, textdata, path)) @@ -59,7 +59,7 @@ def get_clipBoard(files_found, report_folder, seeker, wrap_text): report = ArtifactHtmlReport('Clipboard Data') report.start_artifact_report(report_folder, f'Clipboard Data') report.add_script() - data_headers = ('Data', 'Modified Time', 'Path') + data_headers = ('Modified Time', 'Data', 'Path') report.write_artifact_data_table(data_headers, data_list, file_found, html_escape=False) report.end_artifact_report() diff --git a/scripts/artifacts/contacts.py b/scripts/artifacts/contacts.py index 99863c4d..3e03ec88 100755 --- a/scripts/artifacts/contacts.py +++ b/scripts/artifacts/contacts.py @@ -65,9 +65,6 @@ def get_contacts(files_found, report_folder, seeker, wrap_text): tsvname = f'Contacts' tsv(report_folder, data_headers, data_list, tsvname, source_file) - - tlactivity = f'Contaccts' - timeline(report_folder, tlactivity, data_list, data_headers) else: logfunc('No Contacts found') diff --git a/scripts/artifacts/gboard.py b/scripts/artifacts/gboard.py index 018f4d86..7bce1101 100755 --- a/scripts/artifacts/gboard.py +++ b/scripts/artifacts/gboard.py @@ -100,10 +100,10 @@ def read_trainingcache2(file_found, report_folder, seeker): report.start_artifact_report(report_folder, f'{file_name}', description) report.add_script() - data_headers = ('Id','Text','App','Input Name','Input ID','Event Timestamp') + data_headers = ('Event Timestamp','ID','Text','App','Input Name','Input ID') data_list = [] for ke in keyboard_events: - data_list.append((ke.id, ke.text, ke.app, ke.textbox_name, ke.textbox_id, ke.event_date)) + data_list.append((ke.event_date, ke.id, ke.text, ke.app, ke.textbox_name, ke.textbox_id)) report.write_artifact_data_table(data_headers, data_list, file_found) 
report.end_artifact_report() @@ -187,10 +187,10 @@ def read_trainingcachev2(file_found, report_folder, seeker): report.start_artifact_report(report_folder, f'{file_name}', description) report.add_script() - data_headers = ('Id','Text','App','Input Name','Input ID','Event Timestamp') + data_headers = ('Event Timestamp','ID','Text','App','Input Name','Input ID') data_list = [] for ke in keyboard_events: - data_list.append((ke.id, ke.text, ke.app, ke.textbox_name, ke.textbox_id, ke.event_date)) + data_list.append((ke.event_date, ke.id, ke.text, ke.app, ke.textbox_name, ke.textbox_id)) report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() @@ -217,9 +217,9 @@ def read_trainingcachev3_sessions(file_found, report_folder, seeker): # Sessions sql = """ SELECT - session._session_id AS Session, datetime(session._session_id / 1000, 'unixepoch') AS Start, datetime(session._timestamp_ / 1000, 'unixepoch') AS Finish, + session._session_id AS Session, session.package_name AS Application FROM session @@ -228,7 +228,7 @@ def read_trainingcachev3_sessions(file_found, report_folder, seeker): results = cursor.fetchall() if results: - data_headers = ("Session ID", "Start", "Finish", "Application") + data_headers = ("Start", "Finish", "Session ID", "Application") data_list = results description = "GBoard Sessions" @@ -239,6 +239,8 @@ def read_trainingcachev3_sessions(file_found, report_folder, seeker): report.end_artifact_report() tsv(report_folder, data_headers, data_list, title) + + timeline(report_folder, title, data_list, data_headers) # Close conn.close() diff --git a/scripts/artifacts/googleCalendar.py b/scripts/artifacts/googleCalendar.py index 4816f92e..b33974c6 100644 --- a/scripts/artifacts/googleCalendar.py +++ b/scripts/artifacts/googleCalendar.py @@ -86,7 +86,10 @@ def get_calendar(files_found, report_folder, seeker, wrap_text): cursor = db.cursor() cursor.execute(''' select - datetime(cal_sync8/1000,'unixepoch') as "Last Synced 
Timestamp", + case + when cal_sync8 is NULL then '' + else datetime(cal_sync8/1000,'unixepoch') + end, name, calendar_displayName, account_name, diff --git a/scripts/artifacts/mewe.py b/scripts/artifacts/mewe.py index 344749f9..63cf141a 100755 --- a/scripts/artifacts/mewe.py +++ b/scripts/artifacts/mewe.py @@ -31,7 +31,6 @@ JOIN CHAT_THREAD ON threadId = CHAT_THREAD.id ''' - def _perform_query(cursor, query): try: cursor.execute(query) diff --git a/scripts/artifacts/snapchat.py b/scripts/artifacts/snapchat.py index c9e4dd50..21b51426 100755 --- a/scripts/artifacts/snapchat.py +++ b/scripts/artifacts/snapchat.py @@ -11,9 +11,9 @@ # Last actions taken in the application and who did them FEED_QUERY = ''' SELECT + DATETIME(lastInteractionTimestamp/1000, 'unixepoch', 'localtime'), key, displayInteractionType, - DATETIME(lastInteractionTimestamp/1000, 'unixepoch', 'localtime'), DATETIME(lastReadTimestamp/1000, 'unixepoch', 'localtime'), lastReader, DATETIME(lastWriteTimestamp/1000, 'unixepoch', 'localtime'), @@ -171,7 +171,7 @@ def _parse_feeds(feeds_count, rows, report_folder, db_file_name): logfunc(f'{feeds_count} feeds found') data_headers = ( - 'Key', 'Display Interaction Type', 'Last Interaction Timestamp', + 'Last Interaction Timestamp','Key', 'Display Interaction Type', 'Last Read Timestamp', 'Last Reader', 'Last Write Timestamp', 'Last Writer', 'Last Write Type' ) diff --git a/scripts/artifacts/swellbeing.py b/scripts/artifacts/swellbeing.py index 43459e02..e535efbe 100755 --- a/scripts/artifacts/swellbeing.py +++ b/scripts/artifacts/swellbeing.py @@ -39,14 +39,13 @@ def get_swellbeing(files_found, report_folder, seeker, wrap_text): END as eventTypeDescription FROM usageEvents INNER JOIN foundPackages ON usageEvents.pkgId=foundPackages.pkgId - ''') all_rows = cursor.fetchall() usageentries = len(all_rows) if usageentries > 0: - report = ArtifactHtmlReport('Samsung Wellbeing events') - report.start_artifact_report(report_folder, 'Events') + report = 
ArtifactHtmlReport('Samsung Digital Wellbeing - Events') + report.start_artifact_report(report_folder, 'Samsung Digital Wellbeing - Events') report.add_script() data_headers = ('Timestamp','Event ID','Package Name','Event Type','Event Type Description') data_list = [] @@ -56,19 +55,19 @@ def get_swellbeing(files_found, report_folder, seeker, wrap_text): report.write_artifact_data_table(data_headers, data_list, file_found) report.end_artifact_report() - tsvname = f'samsung wellbeing - events' + tsvname = f'Samsung Digital Wellbeing - Events' tsv(report_folder, data_headers, data_list, tsvname) - tlactivity = f'Samsung Wellbeing - Events' + tlactivity = f'Samsung Digital Wellbeing - Events' timeline(report_folder, tlactivity, data_list, data_headers) else: - logfunc('No Samsung Wellbeing event data available') + logfunc('No Samsung Digital Wellbeing - Events data available') db.close() __artifacts__ = { "swellbeing": ( - "Wellbeing", + "Digital Wellbeing", ('*/com.samsung.android.forest/databases/dwbCommon.db*'), get_swellbeing) } \ No newline at end of file diff --git a/scripts/artifacts/wellbeing.py b/scripts/artifacts/wellbeing.py index 13dd4983..abc19895 100755 --- a/scripts/artifacts/wellbeing.py +++ b/scripts/artifacts/wellbeing.py @@ -10,57 +10,98 @@ def get_wellbeing(files_found, report_folder, seeker, wrap_text): if not file_found.endswith('app_usage'): continue # Skip all other files - db = open_sqlite_db_readonly(file_found) - cursor = db.cursor() - cursor.execute(''' - SELECT - events._id, - datetime(events.timestamp /1000, 'UNIXEPOCH') as timestamps, - packages.package_name, - events.type, - case - when events.type = 1 THEN 'ACTIVITY_RESUMED' - when events.type = 2 THEN 'ACTIVITY_PAUSED' - when events.type = 12 THEN 'NOTIFICATION' - when events.type = 18 THEN 'KEYGUARD_HIDDEN & || Device Unlock' - when events.type = 19 THEN 'FOREGROUND_SERVICE_START' - when events.type = 20 THEN 'FOREGROUND_SERVICE_STOP' - when events.type = 23 THEN 'ACTIVITY_STOPPED' - 
when events.type = 26 THEN 'DEVICE_SHUTDOWN' - when events.type = 27 THEN 'DEVICE_STARTUP' - else events.type - END as eventtype - FROM - events INNER JOIN packages ON events.package_id=packages._id - ''') + db = open_sqlite_db_readonly(file_found) + cursor = db.cursor() + cursor.execute(''' + SELECT + events._id, + datetime(events.timestamp /1000, 'UNIXEPOCH') as timestamps, + packages.package_name, + events.type, + case + when events.type = 1 THEN 'ACTIVITY_RESUMED' + when events.type = 2 THEN 'ACTIVITY_PAUSED' + when events.type = 12 THEN 'NOTIFICATION' + when events.type = 18 THEN 'KEYGUARD_HIDDEN & || Device Unlock' + when events.type = 19 THEN 'FOREGROUND_SERVICE_START' + when events.type = 20 THEN 'FOREGROUND_SERVICE_STOP' + when events.type = 23 THEN 'ACTIVITY_STOPPED' + when events.type = 26 THEN 'DEVICE_SHUTDOWN' + when events.type = 27 THEN 'DEVICE_STARTUP' + else events.type + END as eventtype + FROM + events INNER JOIN packages ON events.package_id=packages._id + ''') - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Wellbeing events') - report.start_artifact_report(report_folder, 'Events') - report.add_script() - data_headers = ('Timestamp', 'Package ID', 'Event Type') - data_list = [] - for row in all_rows: - data_list.append((row[1], row[2], row[4])) + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Digital Wellbeing - Events') + report.start_artifact_report(report_folder, 'Events') + report.add_script() + data_headers = ('Timestamp', 'Package ID', 'Event Type') + data_list = [] + for row in all_rows: + data_list.append((row[1], row[2], row[4])) - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'wellbeing - events' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Wellbeing - Events' - timeline(report_folder, tlactivity, 
data_list, data_headers) - else: - logfunc('No Wellbeing event data available') + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() - db.close() + tsvname = f'Digital Wellbeing - Events' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Digital Wellbeing - Events' + timeline(report_folder, tlactivity, data_list, data_headers) + else: + logfunc('No Digital Wellbeing - Events data available') + + cursor = db.cursor() + cursor.execute(''' + SELECT + datetime(component_events.timestamp/1000, "UNIXEPOCH") as timestamp, + component_events._id, + components.package_id, + packages.package_name, + components.component_name as website, + CASE + when component_events.type=1 THEN 'ACTIVITY_RESUMED' + when component_events.type=2 THEN 'ACTIVITY_PAUSED' + else component_events.type + END as eventType + FROM component_events + INNER JOIN components ON component_events.component_id=components._id + INNER JOIN packages ON components.package_id=packages._id + ORDER BY timestamp + ''') + + all_rows = cursor.fetchall() + usageentries = len(all_rows) + if usageentries > 0: + report = ArtifactHtmlReport('Digital Wellbeing - URL Events') + report.start_artifact_report(report_folder, 'Digital Wellbeing - URL Events') + report.add_script() + data_headers = ('Timestamp', 'Event ID', 'Package ID', 'Package Name', 'Website', 'Event') + data_list = [] + for row in all_rows: + data_list.append((row[0], row[1], row[2], row[3], row[4], row[5])) + report.write_artifact_data_table(data_headers, data_list, file_found) + report.end_artifact_report() + + tsvname = f'Digital Wellbeing - URL Events' + tsv(report_folder, data_headers, data_list, tsvname) + + tlactivity = f'Digital Wellbeing - URL Events' + timeline(report_folder, tlactivity, data_list, data_headers) + else: + logfunc('No Digital Wellbeing - URL Events data available') + + db.close() + __artifacts__ = { "wellbeing": ( - "Wellbeing", + "Digital Wellbeing", 
('*/com.google.android.apps.wellbeing/databases/app_usage*'), get_wellbeing) } \ No newline at end of file diff --git a/scripts/artifacts/wellbeingURLs.py b/scripts/artifacts/wellbeingURLs.py deleted file mode 100755 index 33153443..00000000 --- a/scripts/artifacts/wellbeingURLs.py +++ /dev/null @@ -1,62 +0,0 @@ -import os -import sqlite3 -from scripts.artifact_report import ArtifactHtmlReport -from scripts.ilapfuncs import logfunc, tsv, timeline, is_platform_windows, open_sqlite_db_readonly - -def get_wellbeingURLs(files_found, report_folder, seeker, wrap_text): - - for file_found in files_found: - file_found = str(file_found) - if not file_found.endswith('app_usage'): - continue # Skip all other files - - db = open_sqlite_db_readonly(file_found) - cursor = db.cursor() - cursor.execute(''' - SELECT - datetime(component_events.timestamp/1000, "UNIXEPOCH") as timestamp, - component_events._id, - components.package_id, - packages.package_name, - components.component_name as website, - CASE - when component_events.type=1 THEN 'ACTIVITY_RESUMED' - when component_events.type=2 THEN 'ACTIVITY_PAUSED' - else component_events.type - END as eventType - FROM component_events - INNER JOIN components ON component_events.component_id=components._id - INNER JOIN packages ON components.package_id=packages._id - ORDER BY timestamp - ''') - - all_rows = cursor.fetchall() - usageentries = len(all_rows) - if usageentries > 0: - report = ArtifactHtmlReport('Wellbeing URL events') - report.start_artifact_report(report_folder, 'URL Events') - report.add_script() - data_headers = ('Timestamp', 'Event ID', 'Package ID', 'Package Name', 'Website', 'Event') - data_list = [] - for row in all_rows: - data_list.append((row[0], row[1], row[2], row[3], row[4], row[5])) - - report.write_artifact_data_table(data_headers, data_list, file_found) - report.end_artifact_report() - - tsvname = f'wellbeing - URL events' - tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Wellbeing - 
URL Events' - timeline(report_folder, tlactivity, data_list, data_headers) - else: - logfunc('No Wellbeing URL event data available') - - db.close() - -__artifacts__ = { - "wellbeingURLs": ( - "Wellbeing", - ('*/com.google.android.apps.wellbeing/databases/app_usage*'), - get_wellbeingURLs) -} \ No newline at end of file diff --git a/scripts/artifacts/wellbeingaccount.py b/scripts/artifacts/wellbeingaccount.py index c41ffcde..ec8a0300 100755 --- a/scripts/artifacts/wellbeingaccount.py +++ b/scripts/artifacts/wellbeingaccount.py @@ -26,7 +26,7 @@ def get_wellbeingaccount(files_found, report_folder, seeker, wrap_text): __artifacts__ = { "wellbeingaccount": ( - "Wellbeing", + "Digital Wellbeing", ('*/com.google.android.apps.wellbeing/files/AccountData.pb'), get_wellbeingaccount) } \ No newline at end of file diff --git a/scripts/artifacts/wifiConfigstore.py b/scripts/artifacts/wifiConfigstore.py index fb0868b2..e5bca20a 100755 --- a/scripts/artifacts/wifiConfigstore.py +++ b/scripts/artifacts/wifiConfigstore.py @@ -64,9 +64,7 @@ def get_wifiConfigstore(files_found, report_folder, seeker, wrap_text): tsvname = f'Wifi Configuration Store data' tsv(report_folder, data_headers, data_list, tsvname) - - tlactivity = f'Wifi Configuration Store data' - timeline(report_folder, tlactivity, data_list, data_headers) + else: logfunc('No Wifi Configuration Store data available') diff --git a/scripts/report.py b/scripts/report.py index 3429b83b..623ce677 100755 --- a/scripts/report.py +++ b/scripts/report.py @@ -66,6 +66,9 @@ def get_icon_name(category, artifact): elif artifact == 'PARTNER SETTINGS': icon = 'settings' elif artifact.find('SETTINGS_SECURE_') >= 0: icon = 'settings' else: icon = 'info' + elif category == 'DIGITAL WELLBEING' or category == 'DIGITAL WELLBEING ACCOUNT': + if artifact.find('ACCOUNT DATA') >= 0: icon = 'user' + else: icon = 'layers' elif category == 'DOWNLOADS': icon = 'download' elif category == 'DUCKDUCKGO': if artifact == 'DUCKDUCKGO TAB THUMBNAILS': icon 
= 'image' @@ -213,9 +216,7 @@ def get_icon_name(category, artifact): if artifact == 'VLC MEDIA LIB': icon = 'film' elif artifact == 'VLC THUMBNAILS': icon = 'image' elif category == 'WAZE': icon = 'navigation-2' - elif category == 'WELLBEING' or category == 'WELLBEING ACCOUNT': - if artifact == 'ACCOUNT DATA': icon = 'user' - else: icon = 'layers' + elif category == 'WHATSAPP': if artifact == 'WHATSAPP - CONTACTS': icon = 'users' elif artifact == 'WHATSAPP - ONE TO ONE MESSAGES': icon = 'message-circle' From 0f1b3c58ed646e4be99d837339b7577a45edd94b Mon Sep 17 00:00:00 2001 From: Alexis Bringoni Date: Tue, 10 Jan 2023 19:12:51 -0500 Subject: [PATCH 19/20] Update README.md Python 3.11.1 supported. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0c8e7f71..caef8d2c 100755 --- a/README.md +++ b/README.md @@ -8,7 +8,7 @@ Details in blog post here: https://abrignoni.blogspot.com/2020/02/aleapp-android ## Requirements -**Python 3.9.x ** (older versions of 3.x will also work with the exception of one or two modules) +**Python 3.9 or above** (older versions of 3.x will also work with the exception of one or two modules) ### Dependencies From 71fe4f6ebe0a89046540edd9a2aa74b8e565d266 Mon Sep 17 00:00:00 2001 From: stark4n6 <48143894+stark4n6@users.noreply.github.com> Date: Wed, 11 Jan 2023 11:43:21 -0500 Subject: [PATCH 20/20] Timelining Updates - Small fixes to display proper times in timeline - Remove entries that aren't timelined --- scripts/artifacts/WhatsApp.py | 18 ++++++++---- scripts/artifacts/downloads.py | 4 ++- scripts/artifacts/mewe.py | 13 ++++++--- scripts/artifacts/snapchat.py | 53 +++++++++++++++++++++------------- 4 files changed, 58 insertions(+), 30 deletions(-) diff --git a/scripts/artifacts/WhatsApp.py b/scripts/artifacts/WhatsApp.py index 7d9c2832..b581f268 100755 --- a/scripts/artifacts/WhatsApp.py +++ b/scripts/artifacts/WhatsApp.py @@ -144,7 +144,7 @@ def get_WhatsApp(files_found, 
report_folder, seeker, wrap_text): SELECT datetime(messages.timestamp/1000,'unixepoch') AS message_timestamp, case messages.received_timestamp - WHEN 0 THEN 'N/A' + WHEN 0 THEN '' ELSE datetime(messages.received_timestamp/1000,'unixepoch') end as received_timestamp, messages.key_remote_jid AS id, @@ -212,9 +212,13 @@ def get_WhatsApp(files_found, report_folder, seeker, wrap_text): try: cursor.execute(''' SELECT - datetime(message.timestamp/1000,'unixepoch') AS "Message Time", CASE - WHEN datetime(message.received_timestamp/1000,'unixepoch')="1970-01-01 00:00:00" THEN "N/A" + WHEN message.timestamp = 0 then '' + ELSE + datetime(message.timestamp/1000,'unixepoch') + END AS "Message Time", + CASE + WHEN message.received_timestamp = 0 then '' ELSE datetime(message.received_timestamp/1000,'unixepoch') END AS "Time Message Received", @@ -295,9 +299,13 @@ def get_WhatsApp(files_found, report_folder, seeker, wrap_text): try: cursor.execute(''' SELECT - datetime(message.timestamp/1000,'unixepoch') AS "Message Time", CASE - WHEN datetime(message.received_timestamp/1000,'unixepoch')="1970-01-01 00:00:00" THEN "N/A" + WHEN message.timestamp = 0 then '' + ELSE + datetime(message.timestamp/1000,'unixepoch') + END AS "Message Time", + CASE + WHEN message.received_timestamp = 0 then '' ELSE datetime(message.received_timestamp/1000,'unixepoch') END AS "Time Message Received", diff --git a/scripts/artifacts/downloads.py b/scripts/artifacts/downloads.py index 5d0376bf..1ebc405a 100644 --- a/scripts/artifacts/downloads.py +++ b/scripts/artifacts/downloads.py @@ -15,7 +15,9 @@ def get_downloads(files_found, report_folder, seeker, wrap_text): for file_found in files_found: file_found = str(file_found) if not os.path.basename(file_found) == 'downloads.db': # skip -journal and other files - continue + continue + else: + break db = open_sqlite_db_readonly(file_found) diff --git a/scripts/artifacts/mewe.py b/scripts/artifacts/mewe.py index 63cf141a..ba588c65 100755 --- 
a/scripts/artifacts/mewe.py +++ b/scripts/artifacts/mewe.py @@ -40,7 +40,7 @@ def _perform_query(cursor, query): return 0, None -def _make_reports(title, data_headers, data_list, report_folder, db_file_name): +def _make_reports(title, data_headers, data_list, report_folder, db_file_name, tl_bool): report = ArtifactHtmlReport(title) report.start_artifact_report(report_folder, title) report.add_script() @@ -49,7 +49,8 @@ def _make_reports(title, data_headers, data_list, report_folder, db_file_name): tsv(report_folder, data_headers, data_list, title, db_file_name) - timeline(report_folder, title, data_list, data_headers) + if tl_bool == True: + timeline(report_folder, title, data_list, data_headers) def _parse_xml(xml_file, xml_file_name, report_folder, title, report_name): @@ -73,7 +74,9 @@ def _parse_xml(xml_file, xml_file_name, report_folder, title, report_name): data_list.append((node.attrib['name'], value)) - _make_reports(f'{APP_NAME} - {report_name}', data_headers, data_list, report_folder, xml_file_name) + tl_bool = False + + _make_reports(f'{APP_NAME} - {report_name}', data_headers, data_list, report_folder, xml_file_name, tl_bool) def _parse_chat_messages(messages_count, rows, report_folder, db_file_name): @@ -89,7 +92,9 @@ def _parse_chat_messages(messages_count, rows, report_folder, db_file_name): row[6], row[7], row[8] if row[8] else '', row[9] ) for row in rows] - _make_reports(f'{APP_NAME} - Chat', data_headers, data_list, report_folder, db_file_name) + tl_bool = True + + _make_reports(f'{APP_NAME} - Chat', data_headers, data_list, report_folder, db_file_name, tl_bool) def _parse_app_database(db_file, db_file_name, report_folder): diff --git a/scripts/artifacts/snapchat.py b/scripts/artifacts/snapchat.py index 21b51426..64ce7b8a 100755 --- a/scripts/artifacts/snapchat.py +++ b/scripts/artifacts/snapchat.py @@ -32,15 +32,15 @@ # field indicates when the user was created FRIEND_QUERY = ''' SELECT + case addedTimestamp + when 0 then '' + else 
datetime(addedTimestamp/1000, 'unixepoch', 'localtime') + end, username, userId, displayName, phone, - birthday, - case addedTimestamp - when 0 then '' - else datetime(addedTimestamp/1000, 'unixepoch', 'localtime') - end + birthday FROM Friend WHERE addedTimestamp IS NOT NULL; ''' @@ -86,10 +86,10 @@ SNAP_MEDIA_QUERY = ''' SELECT + DATETIME(create_time/1000, 'unixepoch', 'localtime'), memories_snap._id, media_id, memories_entry_id, - DATETIME(create_time/1000, 'unixepoch', 'localtime'), time_zone_id, format, width, @@ -155,7 +155,7 @@ def _perform_query(cursor, query): return 0, None -def _make_reports(title, data_headers, data_list, report_folder, db_file_name): +def _make_reports(title, data_headers, data_list, report_folder, db_file_name, tl_bool): report = ArtifactHtmlReport(title) report.start_artifact_report(report_folder, title) report.add_script() @@ -163,9 +163,8 @@ def _make_reports(title, data_headers, data_list, report_folder, db_file_name): report.end_artifact_report() tsv(report_folder, data_headers, data_list, title, db_file_name) - - timeline(report_folder, title, data_list, data_headers) - + if tl_bool == True: + timeline(report_folder, title, data_list, data_headers) def _parse_feeds(feeds_count, rows, report_folder, db_file_name): logfunc(f'{feeds_count} feeds found') @@ -178,22 +177,26 @@ def _parse_feeds(feeds_count, rows, report_folder, db_file_name): data_list = [( row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7] ) for row in rows] + + tl_bool = True - _make_reports(f'{APP_NAME} - Feeds', data_headers, data_list, report_folder, db_file_name) + _make_reports(f'{APP_NAME} - Feeds', data_headers, data_list, report_folder, db_file_name, tl_bool) def _parse_friends(friends_count, rows, report_folder, db_file_name): logfunc(f'{friends_count} friends found') data_headers = ( - 'Username', 'User ID', 'Display Name', 'Phone Nr', - 'Birthday', 'Added Timestamp' + 'Added Timestamp', 'Username', 'User ID', 'Display Name', 'Phone Nr', + 
'Birthday' ) data_list = [( row[0], row[1], row[2], row[3], row[4], row[5] ) for row in rows] + + tl_bool = True - _make_reports(f'{APP_NAME} - Friends', data_headers, data_list, report_folder, db_file_name) + _make_reports(f'{APP_NAME} - Friends', data_headers, data_list, report_folder, db_file_name, tl_bool) def _parse_messages(messages_count, rows, report_folder, db_file_name): @@ -208,7 +211,9 @@ def _parse_messages(messages_count, rows, report_folder, db_file_name): _get_text_from_blob(row[6], 0x2c, 0x28, row[5]) ) for row in rows] - _make_reports(f'{APP_NAME} - Messages', data_headers, data_list, report_folder, db_file_name) + tl_bool = True + + _make_reports(f'{APP_NAME} - Messages', data_headers, data_list, report_folder, db_file_name, tl_bool) def _parse_memories_entry(memories_count, rows, report_folder, db_file_name): @@ -222,7 +227,9 @@ def _parse_memories_entry(memories_count, rows, report_folder, db_file_name): row[3], _get_text_from_blob(row[4], 0x20, 0x1c) ) for row in rows] - _make_reports(f'{APP_NAME} - Memories', data_headers, data_list, report_folder, db_file_name) + tl_bool = True + + _make_reports(f'{APP_NAME} - Memories', data_headers, data_list, report_folder, db_file_name, tl_bool) def _parse_meo(meo_count, rows, report_folder, db_file_name): @@ -237,15 +244,17 @@ def _parse_meo(meo_count, rows, report_folder, db_file_name): data_list = [( row[0], row[1], _decrypt_meo_code(row[1]), row[2], row[3] ) for row in rows] + + tl_bool = False - _make_reports(f'{APP_NAME} - MEO (My Eyes Only)', data_headers, data_list, report_folder, db_file_name) + _make_reports(f'{APP_NAME} - MEO (My Eyes Only)', data_headers, data_list, report_folder, db_file_name, tl_bool) def _parse_snap_media(snap_media_count, rows, report_folder, db_file_name): logfunc(f'{snap_media_count} Snap Media found') data_headers = ( - 'ID', 'Media ID', 'Memories Entry ID', 'Create Time', 'Time Zone ID', 'Format', + 'Create Time', 'ID', 'Media ID', 'Memories Entry ID', 'Time Zone ID', 
'Format', 'Width', 'Heigth', 'Duration', 'Has Overlay', 'Overlay Size', 'Overlay Info', 'Front Facing', 'Size', 'Has Location Info', 'Latitude', 'Longitude', 'Snap User Agent', 'Thumbnail Size', 'Thumbnail Info' @@ -255,8 +264,10 @@ def _parse_snap_media(snap_media_count, rows, report_folder, db_file_name): row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16], row[17], row[18], row[19] ) for row in rows] + + tl_bool = True - _make_reports(f'{APP_NAME} - Snap Media', data_headers, data_list, report_folder, db_file_name) + _make_reports(f'{APP_NAME} - Snap Media', data_headers, data_list, report_folder, db_file_name, tl_bool) def _parse_main_db(db_file, db_file_name, report_folder): @@ -334,7 +345,9 @@ def _parse_xml(xml_file, xml_file_name, report_folder, title, report_name): data_list.append((node.attrib['name'], value)) - _make_reports(f'{APP_NAME} - {report_name}', data_headers, data_list, report_folder, xml_file_name) + tl_bool = False + + _make_reports(f'{APP_NAME} - {report_name}', data_headers, data_list, report_folder, xml_file_name, tl_bool) def get_snapchat(files_found, report_folder, seeker, wrap_text):