import uuid
from langgraph.graph import StateGraph, START, END
from langgraph.checkpoint.memory import InMemorySaver
from typing import Annotated
from typing_extensions import TypedDict
from operator import add

from langgraph.store.memory import InMemoryStore

from langchain.embeddings import init_embeddings
from langchain_community.embeddings import ZhipuAIEmbeddings

from langchain_community.embeddings import ZhipuAIEmbeddings

from langgraph.checkpoint.memory import InMemorySaver
import os

# AES key read by LangGraph's EncryptedSerializer (via LANGGRAPH_AES_KEY).
# NOTE(review): "1"*16 is a placeholder key hard-coded in source — anything
# non-demo must load a real secret from the environment / a secret store.
os.environ["LANGGRAPH_AES_KEY"]="1"*16
# We need this because we want to enable threads (conversations)
# NOTE: this InMemorySaver is never used — `checkpointer` is reassigned to a
# SqliteSaver further down, before the graph is compiled.
checkpointer = InMemorySaver()

# ZhipuAI embedding model used to power the store's semantic index.
# SECURITY FIX: the API key was previously hard-coded in this file — a
# committed credential that must be treated as leaked and rotated. Read it
# from the environment instead (returns None if unset, so the failure is
# explicit at the provider call rather than a silent use of a burned key).
embeddings = ZhipuAIEmbeddings(
    model="embedding-2",
    api_key=os.environ.get("ZHIPUAI_API_KEY"),
    # With the `embedding-3` class of models you can also request a specific
    # embedding size, e.g. dimensions=1024.
)

 

# In-memory vector store with a semantic index over the "name" field.
# FIX: ZhipuAI's `embedding-2` model returns 1024-dimensional vectors —
# 1536 is the OpenAI ada-002 size. The index `dims` must match the embedding
# model's output size or similarity search is broken/rejected.
# NOTE(review): this store instance is not used by the live code below (the
# store examples live in the dead-code string at the end of the file).
in_memory_store = InMemoryStore(
    index={
        "embed": embeddings,  # Embedding provider
        "dims": 1024,         # Must match the embedding model's output size
        "fields": ["name"],   # Fields to embed
    }
)

import sqlite3

from langgraph.checkpoint.serde.encrypted import EncryptedSerializer
from langgraph.checkpoint.sqlite import SqliteSaver

# Durable checkpoint store backed by SQLite (supersedes the InMemorySaver
# created earlier — this is the instance the graph actually compiles with).
# FIX: LangGraph may use this connection from a thread other than the one
# that created it; without check_same_thread=False sqlite3 raises
# ProgrammingError at runtime.
# To encrypt checkpoints, uncomment the serde below (requires pycryptodome;
# it reads the LANGGRAPH_AES_KEY set earlier) and pass serde=serde to
# SqliteSaver.
#serde = EncryptedSerializer.from_pycryptodome_aes()  # reads LANGGRAPH_AES_KEY
checkpointer = SqliteSaver(sqlite3.connect("db.db", check_same_thread=False))





# Public input contract of the graph: callers supply only `user_input`.
InputState = TypedDict("InputState", {"user_input": str})

# Public output contract of the graph: only `graph_output` is returned.
OutputState = TypedDict("OutputState", {"graph_output": str})

# Full internal state shared across nodes; superset of input and output keys.
OverallState = TypedDict(
    "OverallState",
    {"foo": str, "user_input": str, "graph_output": str},
)

# Node-to-node scratch state: `bar` is visible only between node_2 and node_3.
PrivateState = TypedDict("PrivateState", {"bar": str})

def node_1(state: InputState) -> OverallState:
    """Read the caller's `user_input` and seed OverallState's `foo` channel."""
    user_text = state["user_input"]
    return {"foo": f"{user_text} name"}

def node_2(state: OverallState) -> PrivateState:
    """Extend the phrase from OverallState and stash it in PrivateState."""
    phrase = state["foo"]
    return {"bar": f"{phrase} is"}

def node_3(state: PrivateState) -> OutputState:
    """Finish the phrase from PrivateState and emit the graph's output."""
    partial = state["bar"]
    return {"graph_output": f"{partial} Lance"}

# Wire the linear pipeline: START -> node_1 -> node_2 -> node_3 -> END.
# FIX: StateGraph() does not accept a `checkpointer` argument — passing one
# there either raises TypeError or is ignored; the checkpointer belongs on
# .compile(). As originally written, compile() got no checkpointer, so the
# thread_id in `config` never actually persisted anything.
builder = StateGraph(
    OverallState,
    input_schema=InputState,
    output_schema=OutputState,
)
builder.add_node("node_1", node_1)
builder.add_node("node_2", node_2)
builder.add_node("node_3", node_3)

builder.add_edge(START, "node_1")
builder.add_edge("node_1", "node_2")
builder.add_edge("node_2", "node_3")
builder.add_edge("node_3", END)

# Same thread_id resumes the same conversation's checkpoint thread.
config = {"configurable": {"thread_id": "1"}}

graph = builder.compile(checkpointer=checkpointer)
output = graph.invoke({"user_input": "My"}, config=config)
print(output)
# Second invoke on the same thread resumes from the stored checkpoint.
output = graph.invoke({"user_input": "My"}, config=config)
print(output)





'''
embeddings = ZhipuAIEmbeddings(
    model="embedding-2",
    api_key="f387f5e4837d4e4bba6d267682a957c9.PmPiTw8qVlsI2Oi5"
    # With the `embedding-3` class
    # of models, you can specify the size
    # of the embeddings you want returned.
    # dimensions=1024
)

 

in_memory_store = InMemoryStore(
    index={
        "embed": embeddings,  # Embedding provider
        "dims": 1536,                              # Embedding dimensions
        "fields": ["name"]              # Fields to embed
    }
)

user_id = "1"
namespace_for_memory = (user_id, "memories")

memory_id = str(uuid.uuid4())
memory = {"food_preference" : "I like pizza"}
in_memory_store.put(namespace_for_memory, memory_id, memory)

in_memory_store.put(
    namespace_for_memory,
    str(uuid.uuid4()),
    {
        "food_preference": "I love Italian cuisine",
        "context": "Discussing dinner plans"
    },
    index=["food_preference"]  # Only embed the "food_preference" field
)

in_memory_store.put(
    namespace_for_memory,
    str(uuid.uuid4()),
    {
        "food_preference": "i am fine ",
        "context": "Discussing dinner plans"
    },
    index=["food_preference"]  # Only embed the "food_preference" field
)

memories = in_memory_store.search(
    namespace_for_memory,
    query="how are you ?",
    limit=1  # Return only the top match
)

print(memories)




from langgraph.store.memory import InMemoryStore
in_memory_store = InMemoryStore()
user_id = "1"
namespace_for_memory = (user_id, "memories")

memory_id = str(uuid.uuid4())
memory = {"food_preference" : "I like pizza"}
in_memory_store.put(namespace_for_memory, memory_id, memory)

memories = in_memory_store.search(namespace_for_memory)
print(memories)



class State(TypedDict):
    foo: str
    bar: Annotated[list[str], add]

def node_a(state: State):
    return {"foo": "a", "bar": ["a"]}

def node_b(state: State):
    return {"foo": "b", "bar": ["b"]}


workflow = StateGraph(State)
workflow.add_node(node_a)
workflow.add_node(node_b)
workflow.add_edge(START, "node_a")
workflow.add_edge("node_a", "node_b")
workflow.add_edge("node_b", END)

checkpointer = InMemorySaver()
graph = workflow.compile(checkpointer=checkpointer)

config = {"configurable": {"thread_id": "1"}}
output=graph.invoke({"foo": ""}, config)
print(output)
output=graph.invoke({"foo": ""}, config)
print(output)

print("---------------------") 

config = {"configurable": {"thread_id": "1", "checkpoint_id": "0c62ca34-ac19-445d-bbb0-5b4984975b2a"}}
output=graph.invoke({"foo": ""}, config=config)
print("output:"+str( output))
print("------------sssssssss---------") 
'''