import csv
import json
import re
import sys
from collections import defaultdict

# Raise the csv module's field size limit so very long scraped text fields
# can still be parsed.
# NOTE(review): the commented-out alternatives below suggest sys.maxsize /
# 2**63-1 overflowed the C long used by the csv module on this platform
# (typical on Windows) — confirm; 2**31-1 is the 32-bit signed maximum.
# csv.field_size_limit(sys.maxsize)
csv.field_size_limit(2 ** 31 - 1)
# csv.field_size_limit(2**63-1)

def strip_html_tags(text):
    """Return *text* with every HTML/XML tag removed.

    Anything between a '<' and the next '>' is treated as a tag and
    deleted; the surrounding text is left untouched.
    """
    return re.sub(r'<[^>]+>', '', text)


def clean_text(text):
    """Strip HTML tags from *text*, split on tabs, and return the
    cleaned segments.

    Args:
        text: Raw text, possibly containing HTML markup, with segments
            separated by tab characters.

    Returns:
        list[str]: Whitespace-trimmed segments, with empty or
        whitespace-only segments dropped.
    """
    text = strip_html_tags(text)
    # A stripped segment can never equal '\n' (strip removes whitespace),
    # so a plain truthiness test is enough to drop blank segments.
    return [seg.strip() for seg in text.split('\t') if seg.strip()]


def contains_word_from_set(word_set, sentence):
    """Return True if *sentence* contains any word from *word_set*.

    Args:
        word_set: Iterable of literal words/phrases to look for.
        sentence: Text to search.

    Returns:
        bool: True when at least one word occurs as a substring of the
        sentence, False otherwise (including for an empty word set).
    """
    # Guard the empty set: '|'.join(()) would yield the pattern '',
    # which matches every sentence.
    if not word_set:
        return False
    # re.escape each word so regex metacharacters (., *, ?, ...) are
    # matched literally when the alternatives are joined with '|'.
    pattern = '|'.join(re.escape(word) for word in word_set)
    return re.search(pattern, sentence) is not None


def process_csv_to_json(input_csv_files, output_json_file):
    """Merge and clean scraped CSV files into a single JSON file.

    Each input CSV is expected to have a header row and the columns
    (company_name, url, text_content, text_connect).  The text content
    of every row is cleaned (HTML stripped, tab-split, trimmed) and
    de-duplicated per company, then written out as a JSON array of
    {'company_name': ..., 'clean_text_content': [...]} objects.

    Args:
        input_csv_files: Iterable of paths to the CSV files to merge.
        output_json_file: Path of the JSON file to write.
    """
    data = defaultdict(set)  # per-company set de-duplicates segments

    for input_csv in input_csv_files:
        # newline='' is required by the csv module so that embedded
        # newlines inside quoted fields are handled correctly.
        with open(input_csv, 'r', encoding='utf-8', newline='') as f:
            reader = csv.DictReader(
                f,
                fieldnames=['company_name', 'url', 'text_content', 'text_connect'],
            )
            next(reader, None)  # skip the header row in the file

            for row in reader:
                company_name = row['company_name']
                # Short rows leave missing columns as None — treat as empty.
                text_content = row['text_content'] or ''
                # Flatten embedded newlines before segment cleaning.
                text_content = text_content.replace('\n', ' ')
                clean_segments = clean_text(text_content)
                if clean_segments:  # only record companies with usable text
                    data[company_name].update(clean_segments)

    # Convert each per-company set to a list so it is JSON-serializable.
    final_data = [
        {
            'company_name': company_name,
            'clean_text_content': list(segments),
        }
        for company_name, segments in data.items()
    ]

    with open(output_json_file, 'w', encoding='utf-8') as f:
        json.dump(final_data, f, ensure_ascii=False, indent=4)


if __name__ == "__main__":
    # Scraper output (first- and second-layer link pages) and the
    # destination for the merged, cleaned JSON.
    spider_dir = 'D:/Courses/SmallThree/scrapy_spider/'
    csv_inputs = [
        spider_dir + 'first_layer_links1.csv',
        spider_dir + 'second_layer_links1.csv',
    ]
    json_output = spider_dir + 'cleaned_links1.json'

    process_csv_to_json(csv_inputs, json_output)