

from typing import Any, Dict, List, Tuple

from llama_index.core.indices.list.base import SummaryIndex
from llama_index.core.indices.struct_store.sql import (
    SQLContextContainerBuilder,
    SQLStructStoreIndex,
)
from llama_index.core.indices.struct_store.sql_query import (
    NLStructStoreQueryEngine,
)
from llama_index.core.schema import (
    BaseNode,
    Document,
    NodeRelationship,
    QueryBundle,
    RelatedNodeInfo,
    TextNode,
)
from llama_index.core.utilities.sql_wrapper import SQLDatabase
from sqlalchemy import (
    Column,
    Integer,
    MetaData,
    String,
    Table,
    create_engine,
    delete,
    select,
)
from typing import List

from llama_index.core.agent.workflow import  FunctionAgent
from llama_index.core.indices.common.struct_store.sql import SQLStructDatapointExtractor
from llama_index.core.vector_stores import SimpleVectorStore
from llama_index.core.schema import  TextNode
from llama_index.core import Settings, SimpleKeywordTableIndex, SummaryIndex, get_response_synthesizer, \
    DocumentSummaryIndex
from llama_index.embeddings.zhipuai import ZhipuAIEmbedding
from llama_index.core.graph_stores import SimplePropertyGraphStore
from llama_index.core.schema import Document
from pydantic import BaseModel
from llama_index.core.indices.property_graph.base import PropertyGraphIndex
from llama_index.core.indices.property_graph.retriever import PGRetriever
from llama_index.core.indices.property_graph.sub_retrievers.base import BasePGRetriever
from llama_index.core.indices.property_graph.sub_retrievers.custom import (
    CustomPGRetriever,
    CUSTOM_RETRIEVE_TYPE,
)
from llama_index.core.indices.property_graph.sub_retrievers.cypher_template import (
    CypherTemplateRetriever,
)
from llama_index.core.indices.property_graph.sub_retrievers.llm_synonym import (
    LLMSynonymRetriever,
)
from llama_index.core.indices.property_graph.sub_retrievers.text_to_cypher import (
    TextToCypherRetriever,
)
from llama_index.core.indices.property_graph.sub_retrievers.vector import (
    VectorContextRetriever,
)
from llama_index.core.indices.property_graph.transformations.implicit import (
    ImplicitPathExtractor,
)
from llama_index.core.indices.property_graph.transformations.schema_llm import (
    SchemaLLMPathExtractor,
)
from llama_index.core.indices.property_graph.transformations.simple_llm import (
    SimpleLLMPathExtractor,
)
from llama_index.core.indices.property_graph.transformations.dynamic_llm import (
    DynamicLLMPathExtractor,
)
from llama_index.core.indices.property_graph.utils import default_parse_triplets_fn

import os

# NOTE(review): API keys were hard-coded in source — a security risk if this
# file is committed or shared. Read them from the environment instead, falling
# back to the original literals so existing behavior is unchanged.
# TODO: rotate these keys and remove the hard-coded fallbacks entirely.
embed_model = ZhipuAIEmbedding(
    model="embedding-2",
    api_key=os.environ.get(
        "ZHIPUAI_API_KEY",
        "f387f5e4837d4e4bba6d267682a957c9.PmPiTw8qVlsI2Oi5",
    ),
    # With the `embedding-3` class of models, you can specify the size
    # of the embeddings you want returned, e.g. dimensions=1024.
)
Settings.embed_model = embed_model

from llama_index.llms.deepseek import DeepSeek

llm = DeepSeek(
    model="deepseek-chat",
    api_key=os.environ.get(
        "DEEPSEEK_API_KEY",
        "sk-605e60a1301040759a821b6b677556fb",
    ),
)
Settings.llm = llm
# Build an in-memory SQLite database with one test table (test_table) and a
# manually supplied table context for the struct-store index below.
# In-memory SQLite keeps this demo self-contained; nothing persists to disk.
engine = create_engine("sqlite:///:memory:")
metadata_obj = MetaData()

table_name = "test_table"
Table(
    table_name,
    metadata_obj,
    Column("user_id", Integer, primary_key=True),
    Column("foo", String(16), nullable=False),
)
metadata_obj.create_all(engine)
# NOTE: we can use the default output parser for this

# Source documents whose "column:value" text will be extracted into rows.
docs = [
    Document(text="user_id:2,foo:bar"),
    Document(text="user_id:8,foo:hello"),
]

# Wrap the engine and attach a hand-written description for the table.
sql_database = SQLDatabase(engine)
table_context_dict = {"test_table": "test_table_context"}

# Derive a summary index over the manually supplied table context, query it
# once (caching the resulting context string on the builder), then package
# everything into a context container for the struct-store index.
builder = SQLContextContainerBuilder(
    sql_database,
    context_dict=table_context_dict,
)

context_index = builder.derive_index_from_context(
    SummaryIndex,
    ignore_db_schema=True,
)

# NOTE: the response only contains the first line (metadata), since
# with the mock patch, newlines are treated as separate calls.
context_response = builder.query_index_for_context(
    context_index,
    "Context query?",
    query_tmpl="{orig_query_str}",
    store_context_str=True,
)

sql_context_container = builder.build_context_container(ignore_db_schema=True)



# Build the struct-store index: row values are extracted from the documents
# into the SQLite table, guided by the context container assembled above.
struct_index = SQLStructStoreIndex.from_documents(
    docs,
    sql_database=sql_database,
    table_name=table_name,
    sql_context_container=sql_context_container,
)

# Run a natural-language query through the NL->SQL engine — presumably this
# translates the text into a SELECT against test_table; just assert this runs.
query_engine = NLStructStoreQueryEngine(struct_index)
response = query_engine.query(QueryBundle("test_table:foo"))

print(response)

print("L---------")