import os
from dataclasses import dataclass
from typing import Union,Optional,Iterator,TypedDict,Any,Literal

from langchain_core.embeddings import Embeddings

@dataclass
class EmbeddingModelDefine:
  """Connection and chunking parameters for one embedding model.

  Replaces the hand-written boilerplate ``__init__`` with a dataclass:
  the generated constructor takes the same fields, in the same order,
  positionally or by keyword, so all existing callers keep working,
  and we gain ``__repr__``/``__eq__`` for free.
  """
  name:str                    # model identifier as known by the API (e.g. 'BAAI/bge-m3')
  embedding_chunk_size:int    # number of texts sent per embedding API request
  split_chunk_size:int        # character length used when splitting documents
  dimensions:int              # dimensionality of the returned embedding vectors
  batch_size:int              # batch size used by downstream processing
  base_url:str                # OpenAI-compatible endpoint base URL
  api_key:str                 # credential for the endpoint


@dataclass
class ChatModelDefine:
  """Connection parameters for one chat model.

  Dataclass form: the generated constructor matches the original
  signature (``name``, ``base_url``, ``api_key``) exactly, so callers
  are unaffected, and ``__repr__``/``__eq__`` come for free.
  """
  name:str       # model identifier as known by the API
  base_url:str   # OpenAI-compatible endpoint base URL
  api_key:str    # credential for the endpoint


# Default OpenAI-compatible endpoint for SiliconFlow.
base_url:str='https://api.siliconflow.cn/v1'
# SECURITY: a live API key is hard-coded below as a fallback. It has been
# committed to source control and must be treated as compromised — revoke it
# and delete the literal. Prefer setting SILICONFLOW_API_KEY in the
# environment; the env var takes precedence, the literal only keeps legacy
# deployments working until the key is rotated.
api_key:str=os.environ.get(
  'SILICONFLOW_API_KEY',
  'sk-nwynnrrpijemdypjusudsmedtpljbostwqwfvhjyuodkfzvj',
)

# Registry of embedding models reachable through the SiliconFlow endpoint,
# keyed by model name. Keys are derived from each definition's ``name``
# field so key and definition can never drift apart.
embeddings_models:dict[str,EmbeddingModelDefine]={
  define.name:define
  for define in (
    EmbeddingModelDefine(
      name='BAAI/bge-m3',
      embedding_chunk_size=8,
      split_chunk_size=1000,
      dimensions=1024,
      batch_size=64,
      base_url=base_url,
      api_key=api_key,
    ),
    EmbeddingModelDefine(
      name='netease-youdao/bce-embedding-base_v1',
      embedding_chunk_size=5,
      split_chunk_size=100,
      dimensions=768,
      batch_size=32,
      base_url=base_url,
      api_key=api_key,
    ),
  )
}

class DefaultModelConfig():
  """Default model selections used when a caller does not specify one.

  Generalized: the previously hard-coded model names are now constructor
  parameters whose defaults equal the old values, so ``DefaultModelConfig()``
  behaves exactly as before while callers may override either selection.
  """
  def __init__(
    self,
    embeddings_model_name:str='BAAI/bge-m3',
    chat_model_name:str='Qwen/Qwen2.5-7B-Instruct',
  ):
    # Names must match keys in embeddings_models / chat_models respectively.
    self.embeddings_model_name=embeddings_model_name
    self.chat_model_name=chat_model_name
# Module-level singleton holding the application-wide defaults.
default_models_config:DefaultModelConfig=DefaultModelConfig()

# Registry of chat models reachable through the SiliconFlow endpoint,
# keyed by model name. Keys are derived from each definition's ``name``
# field so key and definition can never drift apart.
chat_models:dict[str,ChatModelDefine]={
  define.name:define
  for define in (
    ChatModelDefine(
      name='Qwen/Qwen2.5-7B-Instruct',
      base_url=base_url,
      api_key=api_key,
    ),
  )
}

# Resolve the definition for the configured default embedding model once at
# import time; get_embedding_model_default() reads this module-level value.
# Raises KeyError at import if the configured name is not in the registry.
embedding_model_define:EmbeddingModelDefine=embeddings_models[default_models_config.embeddings_model_name]

# Factory for the default embeddings client (SiliconFlow, OpenAI-compatible API).
def get_embedding_model_default()->Embeddings:
  """Return an embeddings client configured from ``embedding_model_define``.

  Uses the module-level default embedding model definition (model name,
  endpoint, credential, and per-request chunk size).
  """
  # Imported lazily so the module loads even if langchain_openai is absent
  # until this factory is actually called.
  from langchain_openai import OpenAIEmbeddings
  cfg=embedding_model_define
  return OpenAIEmbeddings(
    model=cfg.name,
    base_url=cfg.base_url,
    api_key=cfg.api_key,
    chunk_size=cfg.embedding_chunk_size,
  )

from langchain_core.language_models import BaseChatModel
from langchain.chat_models import init_chat_model
def get_chat_model_default()->BaseChatModel:
  """Return a chat model client for the configured default chat model.

  Looks up ``default_models_config.chat_model_name`` in the ``chat_models``
  registry and builds the client via ``init_chat_model``; raises KeyError if
  the configured name is not registered.
  """
  define=chat_models[default_models_config.chat_model_name]
  model:BaseChatModel=init_chat_model(
    define.name,
    # SiliconFlow exposes an OpenAI-compatible API, so the 'openai'
    # provider is used with a custom base_url.
    model_provider='openai',
    base_url=define.base_url,
    api_key=define.api_key,
  )
  return model
