from py2neo import Graph, Node, Relationship, Transaction
import json
import os
from typing import List, Dict, Any
import logging
import time
import sys


class Neo4jDataImporter:
    """Import recipe data from a JSONL file into a Neo4j graph.

    Each input line is one JSON object describing a recipe. Recipes are
    merged into the graph together with related Effect, Ingredient, Step,
    SuitableGroup and TabooGroup nodes, in batched transactions.
    """

    def __init__(self, uri: str, username: str, password: str, batch_size: int = 100):
        """Initialize the importer.

        Args:
            uri: Neo4j connection URI (e.g. "bolt://localhost:7687").
            username: Database username.
            password: Database password.
            batch_size: Number of records committed per transaction.
        """
        self.graph = Graph(uri, auth=(username, password))
        self.batch_size = batch_size
        self._setup_logging()
        self.logger.info("Initializing importer")

    def _setup_logging(self):
        """Configure logging and attach a module-level logger to the instance."""
        logging.basicConfig(
            level=logging.INFO,
            format='%(asctime)s [%(levelname)s] %(message)s',
            handlers=[logging.StreamHandler()]
        )
        self.logger = logging.getLogger(__name__)

    def _setup_constraints(self):
        """Create uniqueness constraints; failures are logged, not fatal."""
        constraints = [
            "CREATE CONSTRAINT IF NOT EXISTS FOR (r:Recipe) REQUIRE r.name IS UNIQUE",
            "CREATE CONSTRAINT IF NOT EXISTS FOR (i:Ingredient) REQUIRE i.name IS UNIQUE",
            "CREATE CONSTRAINT IF NOT EXISTS FOR (e:Effect) REQUIRE e.name IS UNIQUE",
            "CREATE CONSTRAINT IF NOT EXISTS FOR (sg:SuitableGroup) REQUIRE sg.description IS UNIQUE",
            "CREATE CONSTRAINT IF NOT EXISTS FOR (tg:TabooGroup) REQUIRE tg.description IS UNIQUE"
        ]

        for constraint in constraints:
            try:
                self.graph.run(constraint)
                self.logger.debug(f"Constraint created: {constraint}")
            except Exception as e:
                # Best-effort: an already-existing or unsupported constraint
                # should not abort the import.
                self.logger.warning(f"Failed to create constraint: {str(e)}")

    def _show_progress(self, current: int, total: int, start_time: float):
        """Render a one-line progress bar with spinner and ETA on stdout.

        Args:
            current: Number of records processed so far.
            total: Total number of records expected.
            start_time: ``time.time()`` timestamp when processing began.
        """
        if total <= 0:
            # Nothing to report; also avoids division by zero below.
            return

        elapsed = time.time() - start_time
        percent = (current / total) * 100
        spinner = ['-', '\\', '|', '/'][current % 4]

        bar_length = 50
        filled = int(bar_length * current / total)
        bar = f"[{'=' * filled}{' ' * (bar_length - filled)}]"

        # ETA extrapolates the observed per-record pace over the remainder.
        eta = f"ETA: {(elapsed / current * (total - current)):.1f}s" if current > 0 else ""
        sys.stdout.write(
            f"\r{spinner} Progress: {current}/{total} {percent:.1f}% {bar} "
            f"[{elapsed:.1f}s {eta}]"
        )
        sys.stdout.flush()

    def import_json_data(self, file_path: str):
        """Import JSONL data from *file_path* in batches.

        Args:
            file_path: Path to a UTF-8 JSONL file, one recipe object per line.

        Raises:
            FileNotFoundError: If *file_path* does not exist.
        """
        if not os.path.exists(file_path):
            raise FileNotFoundError(f"File not found: {file_path}")

        self.logger.info(f"Importing: {file_path}")
        self._setup_constraints()

        # First pass: count only non-blank lines so the progress total matches
        # what the processing loop actually consumes (blank lines are skipped
        # below and would otherwise inflate the total and spam decode errors).
        with open(file_path, 'r', encoding='utf-8') as f:
            total = sum(1 for line in f if line.strip())

        processed = 0
        batch = []
        start_time = time.time()
        try:
            with open(file_path, 'r', encoding='utf-8') as f:
                for line in f:
                    if not line.strip():
                        continue  # blank/whitespace-only lines carry no record
                    try:
                        data = json.loads(line.strip())
                        batch.append(data)
                        processed += 1

                        if len(batch) >= self.batch_size:
                            self._process_batch(batch)
                            batch = []

                        self._show_progress(processed, total, start_time)
                    except json.JSONDecodeError as e:
                        self.logger.error(f"JSON decode error: {str(e)}\nLine: {line[:200]}")
                    except Exception as e:
                        self.logger.error(f"Processing error: {str(e)}\nLine: {line[:200]}")

                # Flush the final, possibly partial batch.
                if batch:
                    self._process_batch(batch)

        finally:
            sys.stdout.write("\n")
            # Compute elapsed once; floor it so the rate never divides by zero
            # when the file is empty or tiny.
            elapsed = max(time.time() - start_time, 1e-9)
            self.logger.info(
                f"Import completed! Processed {processed}/{total} records, "
                f"took {elapsed:.2f}s, "
                f"speed: {processed/elapsed:.1f} rec/s"
            )

    def _process_batch(self, batch: List[Dict[str, Any]]):
        """Write one batch of recipes in a single transaction.

        If the transaction fails as a whole, it is rolled back and each record
        is retried individually in autocommit mode (the Graph object supports
        the same merge/create API), so one bad record cannot sink the batch.
        """
        tx = self.graph.begin()
        try:
            for data in batch:
                self._process_recipe(tx, data)
            tx.commit()
        except Exception as e:
            tx.rollback()
            self.logger.error(f"Batch failed: {str(e)}")
            for data in batch:
                try:
                    self._process_recipe(self.graph, data)
                except Exception as e:
                    self.logger.error(f"Failed to process recipe: {data.get('name')} - {str(e)}")

    def _process_recipe(self, tx, data: Dict[str, Any]):
        """Merge a single recipe and its related nodes/relationships.

        Args:
            tx: A py2neo Transaction (or Graph, for autocommit retries).
            data: One decoded recipe record; must contain "name".
        """
        recipe = Node(
            "Recipe",
            name=data["name"].strip(),
            image_url=data.get("img_url", "").strip()
        )
        tx.merge(recipe, "Recipe", "name")

        for effect in data.get("effects", []):
            if effect.strip():
                effect_node = Node("Effect", name=effect.strip())
                tx.merge(effect_node, "Effect", "name")
                tx.create(Relationship(recipe, "HAS_EFFECT", effect_node))

        for ingredient in data.get("ingredients", []):
            # Entries are "name:amount" strings; a missing amount defaults to
            # the placeholder text ("适量" = "to taste").
            if isinstance(ingredient, str):
                if ":" in ingredient:
                    name, amount = ingredient.split(":", 1)
                else:
                    name, amount = ingredient, "适量"
            else:
                name, amount = str(ingredient), "适量"

            if name.strip():
                ingredient_node = Node("Ingredient", name=name.strip())
                tx.merge(ingredient_node, "Ingredient", "name")
                # BUGFIX: the amount is per-recipe, but Ingredient nodes are
                # shared (unique by name). Writing amount onto the node let the
                # last-imported recipe overwrite it for every other recipe, so
                # it now lives on the USES_INGREDIENT relationship instead.
                tx.create(Relationship(
                    recipe, "USES_INGREDIENT", ingredient_node,
                    amount=amount.strip()
                ))

        for idx, step in enumerate(data.get("steps", []), 1):
            if step.strip():
                step_node = Node(
                    "Step",
                    description=step.strip(),
                    order=idx
                )
                tx.create(step_node)
                tx.create(Relationship(recipe, "HAS_STEP", step_node))

        for group in data.get("suitable_group", []):
            if group.strip():
                group_node = Node("SuitableGroup", description=group.strip())
                tx.merge(group_node, "SuitableGroup", "description")
                tx.create(Relationship(recipe, "SUITABLE_FOR", group_node))

        for group in data.get("taboo_group", []):
            if group.strip():
                group_node = Node("TabooGroup", description=group.strip())
                tx.merge(group_node, "TabooGroup", "description")
                tx.create(Relationship(recipe, "TABOO_FOR", group_node))
if __name__ == "__main__":
    # Connection settings and input file for a local development import.
    # Building this literal cannot raise, so it sits outside the try block.
    config = {
        "uri": "bolt://localhost:7687",
        "user": "neo4j",
        "password": "12345678",
        "data_file": "caipu.jsonl"
    }

    try:
        importer = Neo4jDataImporter(
            config["uri"],
            config["user"],
            config["password"]
        )
        importer.import_json_data(config["data_file"])
    except Exception as e:
        # Top-level boundary: log with full traceback, then exit non-zero.
        logging.error(f"Error: {str(e)}", exc_info=True)
        # sys.exit instead of the site-module exit(): the latter is meant for
        # interactive sessions and may be absent under python -S.
        sys.exit(1)