# encoding=utf8
import numpy as np
import os
import gc
import sys
import time
from tqdm import tqdm
from gensim.models import Word2Vec
import random
import networkx as nx

class DeepWalkProcessor:
    """DeepWalk node-embedding pipeline for large edge-list files.

    Pipeline: scan the file for node labels -> build an undirected
    adjacency list -> stream uniform random walks -> train a skip-gram
    Word2Vec model on the walks -> export one embedding row per node index.

    Input format: one edge per line, "u v" (extra columns ignored),
    lines starting with '#' treated as comments.
    """

    def __init__(self, file_path, emb_dim=128, num_walks=10, walk_length=80, window_size=10, workers=4):
        self.file_path = file_path        # path to the edge-list file
        self.emb_dim = emb_dim            # embedding dimensionality
        self.num_walks = num_walks        # walks started from every node
        self.walk_length = walk_length    # maximum nodes per walk
        self.window_size = window_size    # Word2Vec context window
        self.workers = workers            # Word2Vec training threads
        self.node_dict = {}               # raw label -> dense index (freed after adjacency build)
        self.num_nodes = 0                # number of distinct nodes
        self.adj_list = []                # adj_list[i] = np.int32 array of neighbor indices
        self.timings = {}                 # phase name -> elapsed seconds

    def _collect_nodes(self):
        """Scan the edge file once and assign each node label a dense index.

        All-digit labels sort numerically first, remaining labels sort
        lexicographically after them. The two groups are kept separate so a
        mix of numeric and non-numeric labels cannot raise TypeError
        (int-vs-str comparison), which the previous key function did.
        """
        start_time = time.time()
        nodes = set()

        with open(self.file_path, 'r') as f:
            for line in tqdm(f, desc="扫描文件", unit="行", mininterval=1):
                if line.strip() and not line.startswith('#'):
                    parts = line.strip().split()
                    if len(parts) >= 2:  # same guard as _build_adjacency
                        nodes.update(parts[:2])

        self.num_nodes = len(nodes)

        def sort_key(label):
            # (group, numeric key, string key): digits first in numeric order.
            return (0, int(label), "") if label.isdigit() else (1, 0, label)

        self.node_dict = {n: i for i, n in enumerate(sorted(nodes, key=sort_key))}
        self.timings['phase1'] = time.time() - start_time

    def _build_adjacency(self):
        """Stream the edge file and build an undirected adjacency list.

        Duplicate edges (in either direction) are skipped via a frozenset
        cache; each kept edge is inserted in both directions. Neighbor
        lists are finally compacted into np.int32 arrays to cut memory.
        """
        start_time = time.time()
        self.adj_list = [[] for _ in range(self.num_nodes)]
        edge_cache = set()

        with open(self.file_path, 'r') as f:
            for line in tqdm(f, desc="处理边", unit="行", mininterval=1):
                if line.strip() and not line.startswith('#'):
                    parts = line.strip().split()
                    if len(parts) >= 2:
                        u, v = parts[0], parts[1]
                        edge_key = frozenset((u, v))
                        if edge_key not in edge_cache:
                            edge_cache.add(edge_key)
                            try:
                                i, j = self.node_dict[u], self.node_dict[v]
                                self.adj_list[i].append(j)
                                self.adj_list[j].append(i)
                            except KeyError:
                                # Node not seen in phase 1 (e.g. file changed
                                # between passes) -- skip the edge.
                                pass

        # The dedup cache can be as large as the edge set; release it
        # before the int32 compaction below allocates.
        del edge_cache

        for i in tqdm(range(self.num_nodes), desc="优化邻接表"):
            self.adj_list[i] = np.array(self.adj_list[i], dtype=np.int32)

        self.timings['phase2'] = time.time() - start_time

    def _generate_walks(self):
        """Yield uniform random walks as lists of string node indices.

        Generator-based so the full walk corpus is never held in memory.
        A walk stops early when it reaches a node with no neighbors.
        """
        randint = np.random.randint  # hoist the lookup out of the hot loop
        for _ in range(self.num_walks):
            for start in range(self.num_nodes):
                walk = [start]
                current = start
                for _ in range(self.walk_length - 1):
                    neighbors = self.adj_list[current]
                    if len(neighbors) == 0:
                        break
                    # Index with randint instead of np.random.choice: same
                    # uniform distribution, far cheaper per step.
                    current = neighbors[randint(len(neighbors))]
                    walk.append(current)
                yield [str(n) for n in walk]

    def compute_embeddings(self):
        """Run the full pipeline and return a (num_nodes, emb_dim) float32 array.

        Row i of the result is the embedding of dense node index i (the
        original labels are released after the adjacency build).
        """
        total_start = time.time()
        self._collect_nodes()
        self._build_adjacency()
        # The label->index map is no longer needed; free it before training.
        del self.node_dict
        gc.collect()

        walk_start = time.time()

        class RestartableWalks:
            """Re-iterable corpus wrapper for gensim.

            Word2Vec iterates its corpus several times (vocabulary scan plus
            one pass per epoch). The previous code handed it a single
            generator object, which was exhausted after the vocabulary scan,
            so training saw an empty corpus. Calling the generator function
            anew on every __iter__ makes the corpus restartable.
            """
            def __init__(self, processor):
                self.processor = processor

            def __iter__(self):
                return self.processor._generate_walks()

        model_start = time.time()
        model = Word2Vec(
            sentences=RestartableWalks(self),
            vector_size=self.emb_dim,
            window=self.window_size,
            sg=1,          # skip-gram, as in the DeepWalk paper
            hs=0,          # negative sampling instead of hierarchical softmax
            negative=5,
            workers=self.workers,
            min_count=0,   # keep every node, even rarely visited ones
        )

        # Collect vectors in dense-index order; every node appears in the
        # vocabulary because each one starts num_walks walks and min_count=0.
        embeddings = np.zeros((self.num_nodes, self.emb_dim), dtype=np.float32)
        for i in range(self.num_nodes):
            embeddings[i] = model.wv[str(i)]

        # NOTE: phase3 includes phase4 -- walk generation is lazy and
        # actually happens inside Word2Vec training; kept for compatibility.
        self.timings['phase3'] = time.time() - walk_start
        self.timings['phase4'] = time.time() - model_start
        self.timings['total_compute'] = time.time() - total_start
        return embeddings

    def save_embeddings(self, embeddings, output_path):
        """Write embeddings in word2vec text format, in chunks.

        First line is "<num_nodes> <emb_dim>"; each following line is the
        dense node index followed by emb_dim values with 6 decimals.
        (Indices, not original labels, are written -- the label map was
        freed in compute_embeddings.)
        """
        start_time = time.time()

        with open(output_path, 'w') as f:
            f.write(f"{self.num_nodes} {self.emb_dim}\n")
            chunk_size = 100000  # bound the number of lines buffered at once
            for start in tqdm(range(0, self.num_nodes, chunk_size), desc="分块写入"):
                end = min(start + chunk_size, self.num_nodes)
                chunk = embeddings[start:end]
                lines = []
                for i in range(chunk.shape[0]):
                    line = f"{start + i} " + " ".join(f"{x:.6f}" for x in chunk[i]) + "\n"
                    lines.append(line)
                f.writelines(lines)  # one buffered call per chunk

        self.timings['save'] = time.time() - start_time

if __name__ == "__main__":

    # Hard-coded dataset selection; paths are relative to this script's
    # expected working directory.
    graph_name = "TWeibo"
    input_file = f"../to_csv/{graph_name}/{graph_name}.ungraph"
    output_file = f"../to_csv/{graph_name}/{graph_name}_DeepWalk.emb"

    try:
        total_start = time.time()
        processor = DeepWalkProcessor(
            input_file,
            emb_dim=128,
            num_walks=10,
            walk_length=80,
            window_size=10,
            workers=4,
        )
        embeddings = processor.compute_embeddings()
        processor.save_embeddings(embeddings, output_file)

    except Exception as e:
        # Report and exit non-zero so callers/schedulers see the failure;
        # previously the script swallowed errors and exited with status 0.
        print(f"\n错误发生: {str(e)}")
        sys.exit(1)