import re
from concurrent.futures import ThreadPoolExecutor
import jieba
import json

class DataAnalysis:
    """Utilities for cleaning, tokenizing, and persisting text data."""

    # Compiled once at class level: clean_text is mapped over whole corpora,
    # so recompiling the pattern per call would be pure waste. Non-greedy
    # match strips anything that looks like an HTML/XML tag.
    _TAG_RE = re.compile(r'<.*?>')

    @staticmethod
    def clean_text(text):
        """Return *text* with all ``<...>`` tag-like spans removed.

        Args:
            text: Input string (may be empty).

        Returns:
            The string with every non-greedy ``<...>`` match deleted.
        """
        return DataAnalysis._TAG_RE.sub('', text)

    def parallel_clean(self, texts, max_workers=10):
        """Clean an iterable of strings concurrently.

        Args:
            texts: Iterable of strings to clean.
            max_workers: Thread-pool size (default 10, matching the
                previous hard-coded value).

        Returns:
            List of cleaned strings, in input order.
        """
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            return list(executor.map(self.clean_text, texts))

    @staticmethod
    def tokenize_text(text):
        """Segment *text* into a list of tokens via ``jieba.lcut``."""
        return jieba.lcut(text)

    def parallel_tokenize(self, texts, max_workers=10):
        """Tokenize an iterable of strings concurrently.

        NOTE(review): jieba segmentation is CPU-bound Python code, so the
        GIL likely limits real parallelism here; a process pool may be
        faster for large corpora — confirm with profiling before changing.

        Args:
            texts: Iterable of strings to tokenize.
            max_workers: Thread-pool size (default 10, matching the
                previous hard-coded value).

        Returns:
            List of token lists, in input order.
        """
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            return list(executor.map(self.tokenize_text, texts))

    @staticmethod
    def save_to_json(data, filename):
        """Serialize *data* to *filename* as UTF-8 JSON.

        Non-ASCII characters are written as-is (``ensure_ascii=False``)
        and output is pretty-printed with 4-space indentation.

        Args:
            data: Any JSON-serializable object.
            filename: Destination path; the file is overwritten.
        """
        with open(filename, 'w', encoding='utf-8') as f:
            json.dump(data, f, ensure_ascii=False, indent=4)