mirror of
https://github.com/KnugiHK/WhatsApp-Chat-Exporter.git
synced 2026-04-29 01:04:58 +00:00
Merge pull request #149 from fschuh/main_test
Support for incremental merges of two export folders
This commit is contained in:
2
.gitignore
vendored
2
.gitignore
vendored
@@ -138,7 +138,9 @@ __main__
|
||||
|
||||
# Dev time intermidiates & temp files
|
||||
result/
|
||||
output/
|
||||
WhatsApp/
|
||||
AppDomainGroup-group.net.whatsapp.WhatsApp.shared/
|
||||
/*.db
|
||||
/*.db-*
|
||||
/myout
|
||||
|
||||
13
README.md
13
README.md
@@ -231,6 +231,19 @@ Contact Enrichment:
|
||||
Use with --enrich-from-vcards. When numbers in the vcf file does not have a country code, this
|
||||
will be used. 1 is for US, 66 for Thailand etc. Most likely use the number of your own country
|
||||
|
||||
Incremental Merging:
|
||||
--incremental-merge Performs an incremental merge of two exports. Requires setting both --source-
|
||||
dir and --target-dir. The chats (JSON files only) and media from the source
|
||||
directory will be merged into the target directory. No chat messages or media
|
||||
will be deleted from the target directory; only new chat messages and media
|
||||
will be added to it. This enables chat messages and media to be deleted from
|
||||
the device to free up space, while ensuring they are preserved in the exported
|
||||
backups.
|
||||
--source-dir SOURCE_DIR
|
||||
Sets the source directory. Used for performing incremental merges.
|
||||
--target-dir TARGET_DIR
|
||||
Sets the target directory. Used for performing incremental merges.
|
||||
|
||||
Miscellaneous:
|
||||
-s, --showkey Show the HEX key used to decrypt the database
|
||||
--check-update Check for updates (require Internet access)
|
||||
|
||||
@@ -13,7 +13,7 @@ from Whatsapp_Chat_Exporter import ios_handler, ios_media_handler
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatCollection, ChatStore
|
||||
from Whatsapp_Chat_Exporter.utility import APPLE_TIME, Crypt, check_update, DbType
|
||||
from Whatsapp_Chat_Exporter.utility import readable_to_bytes, sanitize_filename
|
||||
from Whatsapp_Chat_Exporter.utility import import_from_json, bytes_to_readable
|
||||
from Whatsapp_Chat_Exporter.utility import import_from_json, incremental_merge, bytes_to_readable
|
||||
from argparse import ArgumentParser, SUPPRESS
|
||||
from datetime import datetime
|
||||
from getpass import getpass
|
||||
@@ -207,6 +207,33 @@ def setup_argument_parser() -> ArgumentParser:
|
||||
help="Use with --enrich-from-vcards. When numbers in the vcf file does not have a country code, this will be used. 1 is for US, 66 for Thailand etc. Most likely use the number of your own country"
|
||||
)
|
||||
|
||||
# Incremental merging
|
||||
inc_merging_group = parser.add_argument_group('Incremental Merging')
|
||||
inc_merging_group.add_argument(
|
||||
"--incremental-merge",
|
||||
dest="incremental_merge",
|
||||
default=False,
|
||||
action='store_true',
|
||||
help=("Performs an incremental merge of two exports. "
|
||||
"Requires setting both --source-dir and --target-dir. "
|
||||
"The chats (JSON files only) and media from the source directory will be merged into the target directory. "
|
||||
"No chat messages or media will be deleted from the target directory; only new chat messages and media will be added to it. "
|
||||
"This enables chat messages and media to be deleted from the device to free up space, while ensuring they are preserved in the exported backups."
|
||||
)
|
||||
)
|
||||
inc_merging_group.add_argument(
|
||||
"--source-dir",
|
||||
dest="source_dir",
|
||||
default=None,
|
||||
help="Sets the source directory. Used for performing incremental merges."
|
||||
)
|
||||
inc_merging_group.add_argument(
|
||||
"--target-dir",
|
||||
dest="target_dir",
|
||||
default=None,
|
||||
help="Sets the target directory. Used for performing incremental merges."
|
||||
)
|
||||
|
||||
# Miscellaneous
|
||||
misc_group = parser.add_argument_group('Miscellaneous')
|
||||
misc_group.add_argument(
|
||||
@@ -245,11 +272,16 @@ def validate_args(parser: ArgumentParser, args) -> None:
|
||||
if not args.android and not args.ios and not args.exported and not args.import_json:
|
||||
parser.error("You must define the device type.")
|
||||
if args.no_html and not args.json and not args.text_format:
|
||||
parser.error("You must either specify a JSON output file, text file output directory or enable HTML output.")
|
||||
parser.error(
|
||||
"You must either specify a JSON output file, text file output directory or enable HTML output.")
|
||||
if args.import_json and (args.android or args.ios or args.exported or args.no_html):
|
||||
parser.error("You can only use --import with -j and without --no-html, -a, -i, -e.")
|
||||
parser.error(
|
||||
"You can only use --import with -j and without --no-html, -a, -i, -e.")
|
||||
elif args.import_json and not os.path.isfile(args.json):
|
||||
parser.error("JSON file not found.")
|
||||
if args.incremental_merge and (args.source_dir is None or args.target_dir is None):
|
||||
parser.error(
|
||||
"You must specify both --source-dir and --target-dir for incremental merge.")
|
||||
if args.android and args.business:
|
||||
parser.error("WhatsApp Business is only available on iOS for now.")
|
||||
if "??" not in args.headline:
|
||||
@@ -260,18 +292,21 @@ def validate_args(parser: ArgumentParser, args) -> None:
|
||||
(args.json.endswith(".json") and os.path.isfile(args.json)) or
|
||||
(not args.json.endswith(".json") and os.path.isfile(args.json))
|
||||
):
|
||||
parser.error("When --per-chat is enabled, the destination of --json must be a directory.")
|
||||
parser.error(
|
||||
"When --per-chat is enabled, the destination of --json must be a directory.")
|
||||
|
||||
# vCards validation
|
||||
if args.enrich_from_vcards is not None and args.default_country_code is None:
|
||||
parser.error("When --enrich-from-vcards is provided, you must also set --default-country-code")
|
||||
parser.error(
|
||||
"When --enrich-from-vcards is provided, you must also set --default-country-code")
|
||||
|
||||
# Size validation
|
||||
if args.size is not None and not isinstance(args.size, int) and not args.size.isnumeric():
|
||||
try:
|
||||
args.size = readable_to_bytes(args.size)
|
||||
except ValueError:
|
||||
parser.error("The value for --split must be ended in pure bytes or with a proper unit (e.g., 1048576 or 1MB)")
|
||||
parser.error(
|
||||
"The value for --split must be ended in pure bytes or with a proper unit (e.g., 1048576 or 1MB)")
|
||||
|
||||
# Date filter validation and processing
|
||||
if args.filter_date is not None:
|
||||
@@ -287,7 +322,8 @@ def validate_args(parser: ArgumentParser, args) -> None:
|
||||
|
||||
# Chat filter validation
|
||||
if args.filter_chat_include is not None and args.filter_chat_exclude is not None:
|
||||
parser.error("Chat inclusion and exclusion filters cannot be used together.")
|
||||
parser.error(
|
||||
"Chat inclusion and exclusion filters cannot be used together.")
|
||||
|
||||
validate_chat_filters(parser, args.filter_chat_include)
|
||||
validate_chat_filters(parser, args.filter_chat_exclude)
|
||||
@@ -298,20 +334,23 @@ def validate_chat_filters(parser: ArgumentParser, chat_filter: Optional[List[str
|
||||
if chat_filter is not None:
|
||||
for chat in chat_filter:
|
||||
if not chat.isnumeric():
|
||||
parser.error("Enter a phone number in the chat filter. See https://wts.knugi.dev/docs?dest=chat")
|
||||
parser.error(
|
||||
"Enter a phone number in the chat filter. See https://wts.knugi.dev/docs?dest=chat")
|
||||
|
||||
|
||||
def process_date_filter(parser: ArgumentParser, args) -> None:
|
||||
"""Process and validate date filter arguments."""
|
||||
if " - " in args.filter_date:
|
||||
start, end = args.filter_date.split(" - ")
|
||||
start = int(datetime.strptime(start, args.filter_date_format).timestamp())
|
||||
start = int(datetime.strptime(
|
||||
start, args.filter_date_format).timestamp())
|
||||
end = int(datetime.strptime(end, args.filter_date_format).timestamp())
|
||||
|
||||
if start < 1009843200 or end < 1009843200:
|
||||
parser.error("WhatsApp was first released in 2009...")
|
||||
if start > end:
|
||||
parser.error("The start date cannot be a moment after the end date.")
|
||||
parser.error(
|
||||
"The start date cannot be a moment after the end date.")
|
||||
|
||||
if args.android:
|
||||
args.filter_date = f"BETWEEN {start}000 AND {end}000"
|
||||
@@ -324,9 +363,11 @@ def process_date_filter(parser: ArgumentParser, args) -> None:
|
||||
def process_single_date_filter(parser: ArgumentParser, args) -> None:
|
||||
"""Process single date comparison filters."""
|
||||
if len(args.filter_date) < 3:
|
||||
parser.error("Unsupported date format. See https://wts.knugi.dev/docs?dest=date")
|
||||
parser.error(
|
||||
"Unsupported date format. See https://wts.knugi.dev/docs?dest=date")
|
||||
|
||||
_timestamp = int(datetime.strptime(args.filter_date[2:], args.filter_date_format).timestamp())
|
||||
_timestamp = int(datetime.strptime(
|
||||
args.filter_date[2:], args.filter_date_format).timestamp())
|
||||
|
||||
if _timestamp < 1009843200:
|
||||
parser.error("WhatsApp was first released in 2009...")
|
||||
@@ -342,7 +383,8 @@ def process_single_date_filter(parser: ArgumentParser, args) -> None:
|
||||
elif args.ios:
|
||||
args.filter_date = f"<= {_timestamp - APPLE_TIME}"
|
||||
else:
|
||||
parser.error("Unsupported date format. See https://wts.knugi.dev/docs?dest=date")
|
||||
parser.error(
|
||||
"Unsupported date format. See https://wts.knugi.dev/docs?dest=date")
|
||||
|
||||
|
||||
def setup_contact_store(args) -> Optional['ContactsFromVCards']:
|
||||
@@ -356,7 +398,8 @@ def setup_contact_store(args) -> Optional['ContactsFromVCards']:
|
||||
)
|
||||
exit(1)
|
||||
contact_store = ContactsFromVCards()
|
||||
contact_store.load_vcf_file(args.enrich_from_vcards, args.default_country_code)
|
||||
contact_store.load_vcf_file(
|
||||
args.enrich_from_vcards, args.default_country_code)
|
||||
return contact_store
|
||||
return None
|
||||
|
||||
@@ -513,7 +556,8 @@ def handle_media_directory(args) -> None:
|
||||
media_path = os.path.join(args.output, args.media)
|
||||
|
||||
if os.path.isdir(media_path):
|
||||
print("\nWhatsApp directory already exists in output directory. Skipping...", end="\n")
|
||||
print(
|
||||
"\nWhatsApp directory already exists in output directory. Skipping...", end="\n")
|
||||
else:
|
||||
if args.move_media:
|
||||
try:
|
||||
@@ -708,9 +752,11 @@ def main():
|
||||
# Extract media from backup if needed
|
||||
if args.backup is not None:
|
||||
if not os.path.isdir(args.media):
|
||||
ios_media_handler.extract_media(args.backup, identifiers, args.decrypt_chunk_size)
|
||||
ios_media_handler.extract_media(
|
||||
args.backup, identifiers, args.decrypt_chunk_size)
|
||||
else:
|
||||
print("WhatsApp directory already exists, skipping WhatsApp file extraction.")
|
||||
print(
|
||||
"WhatsApp directory already exists, skipping WhatsApp file extraction.")
|
||||
|
||||
# Set default DB paths if not provided
|
||||
if args.db is None:
|
||||
@@ -718,6 +764,16 @@ def main():
|
||||
if args.wa is None:
|
||||
args.wa = "ContactsV2.sqlite"
|
||||
|
||||
if args.incremental_merge:
|
||||
incremental_merge(
|
||||
args.source_dir,
|
||||
args.target_dir,
|
||||
args.media,
|
||||
args.pretty_print_json,
|
||||
args.avoid_encoding_json
|
||||
)
|
||||
print("Incremental merge completed successfully.")
|
||||
else:
|
||||
# Process contacts
|
||||
process_contacts(args, data, contact_store)
|
||||
|
||||
@@ -731,3 +787,7 @@ def main():
|
||||
handle_media_directory(args)
|
||||
|
||||
print("Everything is done!")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -7,6 +7,7 @@ class Timing:
|
||||
"""
|
||||
Handles timestamp formatting with timezone support.
|
||||
"""
|
||||
|
||||
def __init__(self, timezone_offset: Optional[int]) -> None:
|
||||
"""
|
||||
Initialize Timing object.
|
||||
@@ -27,7 +28,7 @@ class Timing:
|
||||
Returns:
|
||||
Optional[str]: Formatted timestamp string, or None if timestamp is None
|
||||
"""
|
||||
if timestamp:
|
||||
if timestamp is not None:
|
||||
timestamp = timestamp / 1000 if timestamp > 9999999999 else timestamp
|
||||
return datetime.fromtimestamp(timestamp, TimeZone(self.timezone_offset)).strftime(format)
|
||||
return None
|
||||
@@ -37,6 +38,7 @@ class TimeZone(tzinfo):
|
||||
"""
|
||||
Custom timezone class with fixed offset.
|
||||
"""
|
||||
|
||||
def __init__(self, offset: int) -> None:
|
||||
"""
|
||||
Initialize TimeZone object.
|
||||
@@ -151,6 +153,7 @@ class ChatStore:
|
||||
"""
|
||||
Stores chat information and messages.
|
||||
"""
|
||||
|
||||
def __init__(self, type: str, name: Optional[str] = None, media: Optional[str] = None) -> None:
|
||||
"""
|
||||
Initialize ChatStore object.
|
||||
@@ -211,9 +214,24 @@ class ChatStore:
|
||||
'their_avatar': self.their_avatar,
|
||||
'their_avatar_thumb': self.their_avatar_thumb,
|
||||
'status': self.status,
|
||||
'media_base': self.media_base,
|
||||
'messages': {id: msg.to_json() for id, msg in self._messages.items()}
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, data: Dict) -> 'ChatStore':
|
||||
"""Create a chat store from JSON data."""
|
||||
chat = cls(data.get("type"), data.get("name"))
|
||||
chat.my_avatar = data.get("my_avatar")
|
||||
chat.their_avatar = data.get("their_avatar")
|
||||
chat.their_avatar_thumb = data.get("their_avatar_thumb")
|
||||
chat.status = data.get("status")
|
||||
chat.media_base = data.get("media_base")
|
||||
for id, msg_data in data.get("messages", {}).items():
|
||||
message = Message.from_json(msg_data)
|
||||
chat.add_message(id, message)
|
||||
return chat
|
||||
|
||||
def get_last_message(self) -> 'Message':
|
||||
"""Get the most recent message in the chat."""
|
||||
return tuple(self._messages.values())[-1]
|
||||
@@ -230,18 +248,40 @@ class ChatStore:
|
||||
"""Get all message keys in the chat."""
|
||||
return self._messages.keys()
|
||||
|
||||
def merge_with(self, other: 'ChatStore'):
|
||||
"""Merge another ChatStore into this one.
|
||||
|
||||
Args:
|
||||
other (ChatStore): The ChatStore to merge with
|
||||
|
||||
"""
|
||||
if not isinstance(other, ChatStore):
|
||||
raise TypeError("Can only merge with another ChatStore object")
|
||||
|
||||
# Update fields if they are not None in the other ChatStore
|
||||
self.name = other.name or self.name
|
||||
self.type = other.type or self.type
|
||||
self.my_avatar = other.my_avatar or self.my_avatar
|
||||
self.their_avatar = other.their_avatar or self.their_avatar
|
||||
self.their_avatar_thumb = other.their_avatar_thumb or self.their_avatar_thumb
|
||||
self.status = other.status or self.status
|
||||
|
||||
# Merge messages
|
||||
self._messages.update(other._messages)
|
||||
|
||||
|
||||
class Message:
|
||||
"""
|
||||
Represents a single message in a chat.
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
from_me: Union[bool, int],
|
||||
timestamp: int,
|
||||
time: Union[int, float, str],
|
||||
key_id: int,
|
||||
key_id: Union[int, str],
|
||||
received_timestamp: int,
|
||||
read_timestamp: int,
|
||||
timezone_offset: int = 0,
|
||||
@@ -281,9 +321,21 @@ class Message:
|
||||
self.sender = None
|
||||
self.safe = False
|
||||
self.mime = None
|
||||
self.message_type = message_type,
|
||||
self.received_timestamp = timing.format_timestamp(received_timestamp, "%Y/%m/%d %H:%M")
|
||||
self.read_timestamp = timing.format_timestamp(read_timestamp, "%Y/%m/%d %H:%M")
|
||||
self.message_type = message_type
|
||||
if isinstance(received_timestamp, (int, float)):
|
||||
self.received_timestamp = timing.format_timestamp(
|
||||
received_timestamp, "%Y/%m/%d %H:%M")
|
||||
elif isinstance(received_timestamp, str):
|
||||
self.received_timestamp = received_timestamp
|
||||
else:
|
||||
self.received_timestamp = None
|
||||
if isinstance(read_timestamp, (int, float)):
|
||||
self.read_timestamp = timing.format_timestamp(
|
||||
read_timestamp, "%Y/%m/%d %H:%M")
|
||||
elif isinstance(read_timestamp, str):
|
||||
self.read_timestamp = read_timestamp
|
||||
else:
|
||||
self.read_timestamp = None
|
||||
|
||||
# Extra attributes
|
||||
self.reply = None
|
||||
@@ -309,5 +361,32 @@ class Message:
|
||||
'quoted_data': self.quoted_data,
|
||||
'caption': self.caption,
|
||||
'thumb': self.thumb,
|
||||
'sticker': self.sticker
|
||||
'sticker': self.sticker,
|
||||
'message_type': self.message_type,
|
||||
'received_timestamp': self.received_timestamp,
|
||||
'read_timestamp': self.read_timestamp
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, data: Dict) -> 'Message':
|
||||
message = cls(
|
||||
from_me=data["from_me"],
|
||||
timestamp=data["timestamp"],
|
||||
time=data["time"],
|
||||
key_id=data["key_id"],
|
||||
message_type=data.get("message_type"),
|
||||
received_timestamp=data.get("received_timestamp"),
|
||||
read_timestamp=data.get("read_timestamp")
|
||||
)
|
||||
message.media = data.get("media")
|
||||
message.meta = data.get("meta")
|
||||
message.data = data.get("data")
|
||||
message.sender = data.get("sender")
|
||||
message.safe = data.get("safe")
|
||||
message.mime = data.get("mime")
|
||||
message.reply = data.get("reply")
|
||||
message.quoted_data = data.get("quoted_data")
|
||||
message.caption = data.get("caption")
|
||||
message.thumb = data.get("thumb")
|
||||
message.sticker = data.get("sticker")
|
||||
return message
|
||||
|
||||
@@ -5,6 +5,7 @@ import os
|
||||
import unicodedata
|
||||
import re
|
||||
import math
|
||||
import shutil
|
||||
from bleach import clean as sanitize
|
||||
from markupsafe import Markup
|
||||
from datetime import datetime, timedelta
|
||||
@@ -15,8 +16,9 @@ try:
|
||||
from enum import StrEnum, IntEnum
|
||||
except ImportError:
|
||||
# < Python 3.11
|
||||
# This should be removed when the support for Python 3.10 ends.
|
||||
# This should be removed when the support for Python 3.10 ends. (31 Oct 2026)
|
||||
from enum import Enum
|
||||
|
||||
class StrEnum(str, Enum):
|
||||
pass
|
||||
|
||||
@@ -154,7 +156,8 @@ def check_update():
|
||||
else:
|
||||
with raw:
|
||||
package_info = json.load(raw)
|
||||
latest_version = tuple(map(int, package_info["info"]["version"].split(".")))
|
||||
latest_version = tuple(
|
||||
map(int, package_info["info"]["version"].split(".")))
|
||||
__version__ = importlib.metadata.version("whatsapp_chat_exporter")
|
||||
current_version = tuple(map(int, __version__.split(".")))
|
||||
if current_version < latest_version:
|
||||
@@ -183,7 +186,7 @@ def rendering(
|
||||
headline,
|
||||
next=False,
|
||||
previous=False
|
||||
):
|
||||
):
|
||||
if chat.their_avatar_thumb is None and chat.their_avatar is not None:
|
||||
their_avatar_thumb = chat.their_avatar
|
||||
else:
|
||||
@@ -255,7 +258,89 @@ def import_from_json(json_file: str, data: Dict[str, ChatStore]):
|
||||
message.sticker = msg.get("sticker")
|
||||
chat.add_message(id, message)
|
||||
data[jid] = chat
|
||||
print(f"Importing chats from JSON...({index + 1}/{total_row_number})", end="\r")
|
||||
print(
|
||||
f"Importing chats from JSON...({index + 1}/{total_row_number})", end="\r")
|
||||
|
||||
|
||||
def incremental_merge(source_dir: str, target_dir: str, media_dir: str, pretty_print_json: int, avoid_encoding_json: bool):
|
||||
"""Merges JSON files from the source directory into the target directory.
|
||||
|
||||
Args:
|
||||
source_dir (str): The path to the source directory containing JSON files.
|
||||
target_dir (str): The path to the target directory to merge into.
|
||||
media_dir (str): The path to the media directory.
|
||||
"""
|
||||
json_files = [f for f in os.listdir(source_dir) if f.endswith('.json')]
|
||||
if not json_files:
|
||||
print("No JSON files found in the source directory.")
|
||||
return
|
||||
|
||||
print("JSON files found:", json_files)
|
||||
|
||||
for json_file in json_files:
|
||||
source_path = os.path.join(source_dir, json_file)
|
||||
target_path = os.path.join(target_dir, json_file)
|
||||
|
||||
if not os.path.exists(target_path):
|
||||
print(f"Copying '{json_file}' to target directory...")
|
||||
os.makedirs(target_dir, exist_ok=True)
|
||||
shutil.copy2(source_path, target_path)
|
||||
else:
|
||||
print(
|
||||
f"Merging '{json_file}' with existing file in target directory...")
|
||||
with open(source_path, 'r') as src_file, open(target_path, 'r') as tgt_file:
|
||||
source_data = json.load(src_file)
|
||||
target_data = json.load(tgt_file)
|
||||
|
||||
# Parse JSON into ChatStore objects using from_json()
|
||||
source_chats = {jid: ChatStore.from_json(
|
||||
chat) for jid, chat in source_data.items()}
|
||||
target_chats = {jid: ChatStore.from_json(
|
||||
chat) for jid, chat in target_data.items()}
|
||||
|
||||
# Merge chats using merge_with()
|
||||
for jid, chat in source_chats.items():
|
||||
if jid in target_chats:
|
||||
target_chats[jid].merge_with(chat)
|
||||
else:
|
||||
target_chats[jid] = chat
|
||||
|
||||
# Serialize merged data
|
||||
merged_data = {jid: chat.to_json()
|
||||
for jid, chat in target_chats.items()}
|
||||
|
||||
# Check if the merged data differs from the original target data
|
||||
if json.dumps(merged_data, sort_keys=True) != json.dumps(target_data, sort_keys=True):
|
||||
print(
|
||||
f"Changes detected in '{json_file}', updating target file...")
|
||||
with open(target_path, 'w') as merged_file:
|
||||
json.dump(
|
||||
merged_data,
|
||||
merged_file,
|
||||
indent=pretty_print_json,
|
||||
ensure_ascii=not avoid_encoding_json,
|
||||
)
|
||||
else:
|
||||
print(
|
||||
f"No changes detected in '{json_file}', skipping update.")
|
||||
|
||||
# Merge media directories
|
||||
source_media_path = os.path.join(source_dir, media_dir)
|
||||
target_media_path = os.path.join(target_dir, media_dir)
|
||||
print(
|
||||
f"Merging media directories. Source: {source_media_path}, target: {target_media_path}")
|
||||
if os.path.exists(source_media_path):
|
||||
for root, _, files in os.walk(source_media_path):
|
||||
relative_path = os.path.relpath(root, source_media_path)
|
||||
target_root = os.path.join(target_media_path, relative_path)
|
||||
os.makedirs(target_root, exist_ok=True)
|
||||
for file in files:
|
||||
source_file = os.path.join(root, file)
|
||||
target_file = os.path.join(target_root, file)
|
||||
# we only copy if the file doesn't exist in the target or if the source is newer
|
||||
if not os.path.exists(target_file) or os.path.getmtime(source_file) > os.path.getmtime(target_file):
|
||||
print(f"Copying '{source_file}' to '{target_file}'...")
|
||||
shutil.copy2(source_file, target_file)
|
||||
|
||||
|
||||
def sanitize_filename(file_name: str) -> str:
|
||||
@@ -335,23 +420,29 @@ def get_chat_condition(filter: Optional[List[str]], include: bool, columns: List
|
||||
if filter is not None:
|
||||
conditions = []
|
||||
if len(columns) < 2 and jid is not None:
|
||||
raise ValueError("There must be at least two elements in argument columns if jid is not None")
|
||||
raise ValueError(
|
||||
"There must be at least two elements in argument columns if jid is not None")
|
||||
if jid is not None:
|
||||
if platform == "android":
|
||||
is_group = f"{jid}.type == 1"
|
||||
elif platform == "ios":
|
||||
is_group = f"{jid} IS NOT NULL"
|
||||
else:
|
||||
raise ValueError("Only android and ios are supported for argument platform if jid is not None")
|
||||
raise ValueError(
|
||||
"Only android and ios are supported for argument platform if jid is not None")
|
||||
for index, chat in enumerate(filter):
|
||||
if include:
|
||||
conditions.append(f"{' OR' if index > 0 else ''} {columns[0]} LIKE '%{chat}%'")
|
||||
conditions.append(
|
||||
f"{' OR' if index > 0 else ''} {columns[0]} LIKE '%{chat}%'")
|
||||
if len(columns) > 1:
|
||||
conditions.append(f" OR ({columns[1]} LIKE '%{chat}%' AND {is_group})")
|
||||
conditions.append(
|
||||
f" OR ({columns[1]} LIKE '%{chat}%' AND {is_group})")
|
||||
else:
|
||||
conditions.append(f"{' AND' if index > 0 else ''} {columns[0]} NOT LIKE '%{chat}%'")
|
||||
conditions.append(
|
||||
f"{' AND' if index > 0 else ''} {columns[0]} NOT LIKE '%{chat}%'")
|
||||
if len(columns) > 1:
|
||||
conditions.append(f" AND ({columns[1]} NOT LIKE '%{chat}%' AND {is_group})")
|
||||
conditions.append(
|
||||
f" AND ({columns[1]} NOT LIKE '%{chat}%' AND {is_group})")
|
||||
return f"AND ({' '.join(conditions)})"
|
||||
else:
|
||||
return ""
|
||||
@@ -463,7 +554,8 @@ def determine_metadata(content: sqlite3.Row, init_msg: Optional[str]) -> Optiona
|
||||
elif content["action_type"] == 67:
|
||||
return # (PM) this contact use secure service from Facebook???
|
||||
elif content["action_type"] == 69:
|
||||
return # (PM) this contact use secure service from Facebook??? What's the difference with 67????
|
||||
# (PM) this contact use secure service from Facebook??? What's the difference with 67????
|
||||
return
|
||||
else:
|
||||
return # Unsupported
|
||||
return msg
|
||||
@@ -490,7 +582,8 @@ def get_status_location(output_folder: str, offline_static: str) -> str:
|
||||
w3css_path = os.path.join(static_folder, "w3.css")
|
||||
if not os.path.isfile(w3css_path):
|
||||
with urllib.request.urlopen(w3css) as resp:
|
||||
with open(w3css_path, "wb") as f: f.write(resp.read())
|
||||
with open(w3css_path, "wb") as f:
|
||||
f.write(resp.read())
|
||||
w3css = os.path.join(offline_static, "w3.css")
|
||||
|
||||
|
||||
@@ -521,6 +614,7 @@ def setup_template(template: Optional[str], no_avatar: bool, experimental: bool
|
||||
template_env.filters['sanitize_except'] = sanitize_except
|
||||
return template_env.get_template(template_file)
|
||||
|
||||
|
||||
# iOS Specific
|
||||
APPLE_TIME = 978307200
|
||||
|
||||
@@ -541,24 +635,32 @@ def slugify(value: str, allow_unicode: bool = False) -> str:
|
||||
if allow_unicode:
|
||||
value = unicodedata.normalize('NFKC', value)
|
||||
else:
|
||||
value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore').decode('ascii')
|
||||
value = unicodedata.normalize('NFKD', value).encode(
|
||||
'ascii', 'ignore').decode('ascii')
|
||||
value = re.sub(r'[^\w\s-]', '', value.lower())
|
||||
return re.sub(r'[-\s]+', '-', value).strip('-_')
|
||||
|
||||
|
||||
class WhatsAppIdentifier(StrEnum):
|
||||
MESSAGE = "7c7fba66680ef796b916b067077cc246adacf01d" # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ChatStorage.sqlite
|
||||
CONTACT = "b8548dc30aa1030df0ce18ef08b882cf7ab5212f" # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ContactsV2.sqlite
|
||||
CALL = "1b432994e958845fffe8e2f190f26d1511534088" # AppDomainGroup-group.net.whatsapp.WhatsApp.shared-CallHistory.sqlite
|
||||
# AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ChatStorage.sqlite
|
||||
MESSAGE = "7c7fba66680ef796b916b067077cc246adacf01d"
|
||||
# AppDomainGroup-group.net.whatsapp.WhatsApp.shared-ContactsV2.sqlite
|
||||
CONTACT = "b8548dc30aa1030df0ce18ef08b882cf7ab5212f"
|
||||
# AppDomainGroup-group.net.whatsapp.WhatsApp.shared-CallHistory.sqlite
|
||||
CALL = "1b432994e958845fffe8e2f190f26d1511534088"
|
||||
DOMAIN = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared"
|
||||
|
||||
|
||||
class WhatsAppBusinessIdentifier(StrEnum):
|
||||
MESSAGE = "724bd3b98b18518b455a87c1f3ac3a0d189c4466" # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ChatStorage.sqlite
|
||||
CONTACT = "d7246a707f51ddf8b17ee2dddabd9e0a4da5c552" # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ContactsV2.sqlite
|
||||
CALL = "b463f7c4365eefc5a8723930d97928d4e907c603" # AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-CallHistory.sqlite
|
||||
# AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ChatStorage.sqlite
|
||||
MESSAGE = "724bd3b98b18518b455a87c1f3ac3a0d189c4466"
|
||||
# AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-ContactsV2.sqlite
|
||||
CONTACT = "d7246a707f51ddf8b17ee2dddabd9e0a4da5c552"
|
||||
# AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared-CallHistory.sqlite
|
||||
CALL = "b463f7c4365eefc5a8723930d97928d4e907c603"
|
||||
DOMAIN = "AppDomainGroup-group.net.whatsapp.WhatsAppSMB.shared"
|
||||
|
||||
|
||||
class JidType(IntEnum):
|
||||
PM = 0
|
||||
GROUP = 1
|
||||
|
||||
@@ -60,3 +60,8 @@ include = ["Whatsapp_Chat_Exporter"]
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
Whatsapp_Chat_Exporter = ["*.html"]
|
||||
|
||||
[dependency-groups]
|
||||
dev = [
|
||||
"pytest>=8.3.5",
|
||||
]
|
||||
|
||||
341
tests/test_incremental_merge.py
Normal file
341
tests/test_incremental_merge.py
Normal file
@@ -0,0 +1,341 @@
|
||||
import os
|
||||
import json
|
||||
import pytest
|
||||
from unittest.mock import patch, mock_open, call, MagicMock
|
||||
from Whatsapp_Chat_Exporter.utility import incremental_merge
|
||||
from Whatsapp_Chat_Exporter.data_model import ChatStore
|
||||
|
||||
# Test data setup
|
||||
BASE_PATH = "AppDomainGroup-group.net.whatsapp.WhatsApp.shared"
|
||||
chat_data_1 = {
|
||||
"12345678@s.whatsapp.net": {
|
||||
"name": "Friend",
|
||||
"type": "ios",
|
||||
"my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
|
||||
"their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
|
||||
"their_avatar_thumb": None,
|
||||
"status": None,
|
||||
"messages": {
|
||||
"24690": {
|
||||
"from_me": True,
|
||||
"timestamp": 1463926635.571629,
|
||||
"time": "10:17",
|
||||
"media": False,
|
||||
"key_id": "34B5EF10FBCA37B7E",
|
||||
"meta": False,
|
||||
"data": "I'm here",
|
||||
"safe": False,
|
||||
"sticker": False
|
||||
},
|
||||
"24691": { # This message only exists in target
|
||||
"from_me": False,
|
||||
"timestamp": 1463926641.571629,
|
||||
"time": "10:17",
|
||||
"media": False,
|
||||
"key_id": "34B5EF10FBCA37B8E",
|
||||
"meta": False,
|
||||
"data": "Great to see you",
|
||||
"safe": False,
|
||||
"sticker": False
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
chat_data_2 = {
|
||||
"12345678@s.whatsapp.net": {
|
||||
"name": "Friend",
|
||||
"type": "ios",
|
||||
"my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
|
||||
"their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
|
||||
"their_avatar_thumb": None,
|
||||
"status": None,
|
||||
"messages": {
|
||||
"24690": {
|
||||
"from_me": True,
|
||||
"timestamp": 1463926635.571629,
|
||||
"time": "10:17",
|
||||
"media": False,
|
||||
"key_id": "34B5EF10FBCA37B7E",
|
||||
"meta": False,
|
||||
"data": "I'm here",
|
||||
"safe": False,
|
||||
"sticker": False
|
||||
},
|
||||
"24692": { # This message only exists in source
|
||||
"from_me": False,
|
||||
"timestamp": 1463926642.571629,
|
||||
"time": "10:17",
|
||||
"media": False,
|
||||
"key_id": "34B5EF10FBCA37B9E",
|
||||
"meta": False,
|
||||
"data": "Hi there!",
|
||||
"safe": False,
|
||||
"sticker": False
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Expected merged data - should contain all messages with all fields initialized as they would be by Message class
|
||||
chat_data_merged = {
|
||||
"12345678@s.whatsapp.net": {
|
||||
"name": "Friend",
|
||||
"type": "ios",
|
||||
"my_avatar": os.path.join(BASE_PATH, "Media", "Profile", "Photo.jpg"),
|
||||
"their_avatar": os.path.join(BASE_PATH, "Media", "Profile", "12345678-1709851420.thumb"),
|
||||
"their_avatar_thumb": None,
|
||||
"status": None,
|
||||
"media_base": None,
|
||||
"messages": {
|
||||
"24690": {
|
||||
"from_me": True,
|
||||
"timestamp": 1463926635.571629,
|
||||
"time": "10:17",
|
||||
"media": False,
|
||||
"key_id": "34B5EF10FBCA37B7E",
|
||||
"meta": False,
|
||||
"data": "I'm here",
|
||||
"sender": None,
|
||||
"safe": False,
|
||||
"mime": None,
|
||||
"reply": None,
|
||||
"quoted_data": None,
|
||||
"caption": None,
|
||||
"thumb": None,
|
||||
"sticker": False,
|
||||
"message_type": None,
|
||||
"received_timestamp": None,
|
||||
"read_timestamp": None
|
||||
},
|
||||
"24691": {
|
||||
"from_me": False,
|
||||
"timestamp": 1463926641.571629,
|
||||
"time": "10:17",
|
||||
"media": False,
|
||||
"key_id": "34B5EF10FBCA37B8E",
|
||||
"meta": False,
|
||||
"data": "Great to see you",
|
||||
"sender": None,
|
||||
"safe": False,
|
||||
"mime": None,
|
||||
"reply": None,
|
||||
"quoted_data": None,
|
||||
"caption": None,
|
||||
"thumb": None,
|
||||
"sticker": False,
|
||||
"message_type": None,
|
||||
"received_timestamp": None,
|
||||
"read_timestamp": None
|
||||
},
|
||||
"24692": {
|
||||
"from_me": False,
|
||||
"timestamp": 1463926642.571629,
|
||||
"time": "10:17",
|
||||
"media": False,
|
||||
"key_id": "34B5EF10FBCA37B9E",
|
||||
"meta": False,
|
||||
"data": "Hi there!",
|
||||
"sender": None,
|
||||
"safe": False,
|
||||
"mime": None,
|
||||
"reply": None,
|
||||
"quoted_data": None,
|
||||
"caption": None,
|
||||
"thumb": None,
|
||||
"sticker": False,
|
||||
"message_type": None,
|
||||
"received_timestamp": None,
|
||||
"read_timestamp": None
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@pytest.fixture
def mock_filesystem():
    """Patch every filesystem call incremental_merge touches and yield the mocks.

    Yields a dict keyed by short names ("exists", "copy2", ...) so tests can
    configure return values and inspect call counts per patched function.
    """
    with (
        patch("os.path.exists") as exists_mock,
        patch("os.makedirs") as makedirs_mock,
        patch("os.path.getmtime") as getmtime_mock,
        patch("os.listdir") as listdir_mock,
        patch("os.walk") as walk_mock,
        patch("shutil.copy2") as copy2_mock,
    ):
        yield {
            "exists": exists_mock,
            "makedirs": makedirs_mock,
            "getmtime": getmtime_mock,
            "listdir": listdir_mock,
            "walk": walk_mock,
            "copy2": copy2_mock,
        }
||||
def test_incremental_merge_new_file(mock_filesystem):
    """Test merging when target file doesn't exist"""
    src = "/source"
    dst = "/target"

    # Only the source directory exists; the target must be created.
    mock_filesystem["exists"].side_effect = lambda path: path == "/source"
    mock_filesystem["listdir"].return_value = ["chat.json"]

    incremental_merge(src, dst, "media", 2, True)

    # Target dir is created once, and the lone chat file is copied verbatim.
    mock_filesystem["makedirs"].assert_called_once_with(dst, exist_ok=True)
    mock_filesystem["copy2"].assert_called_once_with(
        os.path.join(src, "chat.json"),
        os.path.join(dst, "chat.json"),
    )
||||
def test_incremental_merge_existing_file_with_changes(mock_filesystem):
    """Test merging when target file exists and has changes"""
    src = "source"
    dst = "target"
    media = "media"

    # Everything "exists"; a single chat file lives in each directory.
    mock_filesystem["exists"].side_effect = lambda _: True
    mock_filesystem["listdir"].return_value = ["chat.json"]

    # Canned JSON keyed by normalized path, so lookups survive os.sep quirks.
    src_json = os.path.join(src, "chat.json")
    dst_json = os.path.join(dst, "chat.json")
    contents = {
        src_json: json.dumps(chat_data_2),
        dst_json: json.dumps(chat_data_1),
    }

    # Capture every chunk written to the target file.
    written_chunks = []
    mock_write = MagicMock(side_effect=written_chunks.append)

    with patch("builtins.open", mock_open()) as mock_file:
        def fake_open(filename, mode="r"):
            if mode == 'w':
                handle = mock_open().return_value
                handle.write.side_effect = mock_write
                return handle
            # Reads: serve the canned JSON for this (normalized) path.
            data = contents.get(os.path.normpath(filename), '')
            return mock_open(read_data=data).return_value

        mock_file.side_effect = fake_open

        incremental_merge(src, dst, media, 2, True)

        # Both sides were read, and the target was rewritten.
        mock_file.assert_any_call(src_json, "r")
        mock_file.assert_any_call(dst_json, "r")
        mock_file.assert_any_call(dst_json, "w")

        assert mock_write.called, "Write method was never called"
        written_data = json.loads(''.join(written_chunks))
        assert written_data is not None, "No data was written"
        assert written_data == chat_data_merged, "Merged data does not match expected result"

        messages = written_data["12345678@s.whatsapp.net"]["messages"]
        assert "24690" in messages, "Common message should be present"
        assert "24691" in messages, "Target-only message should be preserved"
        assert "24692" in messages, "Source-only message should be added"
        assert len(messages) == 3, "Should have exactly 3 messages"
||||
def test_incremental_merge_existing_file_no_changes(mock_filesystem):
    """Test merging when target file exists but has no changes"""
    src = "source"
    dst = "target"
    media = "media"

    mock_filesystem["exists"].side_effect = lambda _: True
    mock_filesystem["listdir"].return_value = ["chat.json"]

    # Source and target hold identical chat data, so no rewrite is expected.
    identical = json.dumps(chat_data_1)
    contents = {
        os.path.join(src, "chat.json"): identical,
        os.path.join(dst, "chat.json"): identical,
    }

    with patch("builtins.open", mock_open()) as mock_file:
        def fake_open(filename, mode="r"):
            if mode == 'w':
                return mock_open().return_value
            # Reads: look up canned content under the normalized path.
            data = contents.get(os.path.normpath(filename), '')
            return mock_open(read_data=data).return_value

        mock_file.side_effect = fake_open

        incremental_merge(src, dst, media, 2, True)

        # No write() should have been issued on any opened handle.
        write_calls = [
            call for call in mock_file.mock_calls if call[0] == "().write"]
        assert len(write_calls) == 0
||||
def test_incremental_merge_media_copy(mock_filesystem):
    """Test media file copying during merge"""
    src = "source"
    dst = "target"
    media = "media"

    mock_filesystem["exists"].side_effect = lambda _: True
    mock_filesystem["listdir"].return_value = ["chat.json"]

    # Simulated media tree: one file at the root and one in a subfolder.
    media_root = os.path.join(src, "media")
    mock_filesystem["walk"].return_value = [
        (media_root, ["subfolder"], ["file1.jpg"]),
        (os.path.join(media_root, "subfolder"), [], ["file2.jpg"]),
    ]
    # Source files look newer (mtime 1000) than their target copies (500).
    mock_filesystem["getmtime"].side_effect = (
        lambda path: 1000 if "source" in path else 500
    )

    # Identical chat data on both sides: only media work should happen.
    identical = json.dumps(chat_data_1)
    contents = {
        os.path.join(src, "chat.json"): identical,
        os.path.join(dst, "chat.json"): identical,
    }

    with patch("builtins.open", mock_open()) as mock_file:
        def fake_open(filename, mode="r"):
            if mode == 'w':
                return mock_open().return_value
            # Reads: look up canned content under the normalized path.
            data = contents.get(os.path.normpath(filename), '')
            return mock_open(read_data=data).return_value

        mock_file.side_effect = fake_open

        incremental_merge(src, dst, media, 2, True)

        # At least the target dir and the media dir must be created.
        assert mock_filesystem["makedirs"].call_count >= 2
        # Both media files were newer, so both get copied.
        assert mock_filesystem["copy2"].call_count == 2
Reference in New Issue
Block a user