import json
import logging
from concurrent.futures import ProcessPoolExecutor, as_completed
from hashlib import md5
from typing import List, Tuple

from tqdm import tqdm

# Intended fuzzy-match threshold (percent). Note: the code below only performs
# exact-match deduplication, so this value is currently unused.
similarity_threshold = 80


def remove_duplicates(conversations: List[dict]) -> List[dict]:
    """Keep one conversation per unique set of message values."""
    unique_ids = {}  # content hash -> (id, conversation)
    with ProcessPoolExecutor() as executor:
        # Hash in worker processes, deduplicate in the main process. The original
        # design passed a shared `unique_hashes` set into the workers, which cannot
        # work: each worker receives a pickled snapshot of the (then-empty) set,
        # so duplicates were never detected there.
        futures = [executor.submit(hash_conversation, conversation)
                   for conversation in conversations]
        for future in tqdm(as_completed(futures), total=len(futures),
                           desc="Deduplicating", unit="conversations"):
            hash_, conversation = future.result()
            if hash_ in unique_ids:
                logging.debug(f"Duplicate found: {conversation}")
                continue
            id_ = conversation.pop('id')
            unique_ids[hash_] = (id_, conversation)
    return [{'id': id_, **conversation} for id_, conversation in unique_ids.values()]


def hash_conversation(conversation: dict) -> Tuple[str, dict]:
    """Worker task: compute the content hash and hand the conversation back.

    Replaces the former `check_unique`, whose shared-set membership test could
    never see hashes added by other processes.
    """
    return conversation_hash(conversation), conversation


def conversation_hash(conversation: dict) -> str:
    # Order-insensitive fingerprint: hash the sorted, de-duplicated message values.
    values = frozenset(msg['value'] for msg in conversation['conversations'])
    return md5(json.dumps(sorted(values)).encode()).hexdigest()
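

# Usage sketch, not part of the original script: assumes a ShareGPT-style JSON
# file, i.e. a list of {"id": ..., "conversations": [{"from": ..., "value": ...}]}
# records; the file names are placeholders. The __main__ guard also keeps
# ProcessPoolExecutor working under the "spawn" start method (Windows/macOS).
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)

    with open("conversations.json", "r", encoding="utf-8") as f:
        conversations = json.load(f)

    deduped = remove_duplicates(conversations)
    logging.info("Kept %d of %d conversations", len(deduped), len(conversations))

    with open("conversations.deduped.json", "w", encoding="utf-8") as f:
        json.dump(deduped, f, ensure_ascii=False, indent=2)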