ishaq101 commited on
Commit
e22b3b4
·
1 Parent(s): 06067fa

[NOTICKET] Fix langfuse version, testing langfuse

Browse files
config/constant.py CHANGED
@@ -29,6 +29,12 @@ class AzureBlobConstants:
29
  CHUNK_SIZE: int = 4 * 1024 * 1024
30
  ACCOUNT_NAME: str = os.environ.get("azureai__container__account__name")
31
 
 
 
 
 
 
 
32
  @dataclass
33
  class ProfileFieldTypes:
34
  NUMERIC: List[str] = field(default_factory=lambda: ["gpa_edu_1", "gpa_edu_2", "gpa_edu_3", "yoe"])
 
29
  CHUNK_SIZE: int = 4 * 1024 * 1024
30
  ACCOUNT_NAME: str = os.environ.get("azureai__container__account__name")
31
 
32
+ @dataclass(frozen=True)
33
+ class LangfuseConstants:
34
+ SECRET_KEY: str = os.environ.get("ss__langfuse__secret_key")
35
+ PUBLIC_KEY: str = os.environ.get("ss__langfuse__public_key")
36
+ HOST: str = os.environ.get("langfuse__host")
37
+
38
  @dataclass
39
  class ProfileFieldTypes:
40
  NUMERIC: List[str] = field(default_factory=lambda: ["gpa_edu_1", "gpa_edu_2", "gpa_edu_3", "yoe"])
externals/databases/database.py CHANGED
@@ -13,10 +13,10 @@ DATABASE_URL = EnvPostgresConstants.CONSTRING.\
13
 
14
  engine = create_async_engine(
15
  DATABASE_URL,
16
- echo=False,
17
- future=True,
18
  pool_pre_ping=True,
19
  pool_recycle=300,
 
 
20
  )
21
 
22
  AsyncSessionLocal = async_sessionmaker(
 
13
 
14
  engine = create_async_engine(
15
  DATABASE_URL,
 
 
16
  pool_pre_ping=True,
17
  pool_recycle=300,
18
+ pool_size=5,
19
+ max_overflow=10,
20
  )
21
 
22
  AsyncSessionLocal = async_sessionmaker(
pyproject.toml CHANGED
@@ -15,7 +15,7 @@ dependencies = [
15
  "frontend>=0.0.3",
16
  "langchain>=1.2.0",
17
  "langchain-openai>=1.1.6",
18
- "langfuse==2.33.0",
19
  "pandas>=2.3.3",
20
  "passlib==1.7.4",
21
  "pdf2image>=1.17.0",
 
15
  "frontend>=0.0.3",
16
  "langchain>=1.2.0",
17
  "langchain-openai>=1.1.6",
18
+ "langfuse>=3.0.0",
19
  "pandas>=2.3.3",
20
  "passlib==1.7.4",
21
  "pdf2image>=1.17.0",
requirements.txt CHANGED
@@ -91,6 +91,8 @@ frozenlist==1.8.0
91
  # via
92
  # aiohttp
93
  # aiosignal
 
 
94
  greenlet==3.3.1
95
  # via sqlalchemy
96
  h11==0.16.0
@@ -114,9 +116,10 @@ idna==3.11
114
  # anyio
115
  # email-validator
116
  # httpx
117
- # langfuse
118
  # requests
119
  # yarl
 
 
120
  isodate==0.7.2
121
  # via azure-storage-blob
122
  itsdangerous==2.2.0
@@ -140,7 +143,7 @@ langchain-core==1.2.14
140
  # langgraph-prebuilt
141
  langchain-openai==1.1.10
142
  # via service-explorer-backend (pyproject.toml)
143
- langfuse==2.33.0
144
  # via service-explorer-backend (pyproject.toml)
145
  langgraph==1.0.9
146
  # via langchain
@@ -173,7 +176,29 @@ multidict==6.7.1
173
  numpy==2.4.2
174
  # via pandas
175
  openai==2.21.0
176
- # via langchain-openai
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
177
  orjson==3.11.7
178
  # via
179
  # langgraph-sdk
@@ -200,6 +225,10 @@ propcache==0.4.1
200
  # via
201
  # aiohttp
202
  # yarl
 
 
 
 
203
  psycopg2==2.9.11
204
  # via service-explorer-backend (pyproject.toml)
205
  pyasn1==0.6.2
@@ -260,8 +289,10 @@ regex==2026.2.19
260
  requests==2.32.5
261
  # via
262
  # azure-core
 
263
  # langsmith
264
  # msal
 
265
  # requests-toolbelt
266
  # tiktoken
267
  requests-toolbelt==1.0.0
@@ -312,6 +343,10 @@ typing-extensions==4.15.0
312
  # fastapi
313
  # langchain-core
314
  # openai
 
 
 
 
315
  # pydantic
316
  # pydantic-core
317
  # pydantic-extra-types
@@ -352,5 +387,7 @@ xxhash==3.6.0
352
  # langsmith
353
  yarl==1.22.0
354
  # via aiohttp
 
 
355
  zstandard==0.25.0
356
  # via langsmith
 
91
  # via
92
  # aiohttp
93
  # aiosignal
94
+ googleapis-common-protos==1.72.0
95
+ # via opentelemetry-exporter-otlp-proto-http
96
  greenlet==3.3.1
97
  # via sqlalchemy
98
  h11==0.16.0
 
116
  # anyio
117
  # email-validator
118
  # httpx
 
119
  # requests
120
  # yarl
121
+ importlib-metadata==8.7.1
122
+ # via opentelemetry-api
123
  isodate==0.7.2
124
  # via azure-storage-blob
125
  itsdangerous==2.2.0
 
143
  # langgraph-prebuilt
144
  langchain-openai==1.1.10
145
  # via service-explorer-backend (pyproject.toml)
146
+ langfuse==3.14.5
147
  # via service-explorer-backend (pyproject.toml)
148
  langgraph==1.0.9
149
  # via langchain
 
176
  numpy==2.4.2
177
  # via pandas
178
  openai==2.21.0
179
+ # via
180
+ # langchain-openai
181
+ # langfuse
182
+ opentelemetry-api==1.39.1
183
+ # via
184
+ # langfuse
185
+ # opentelemetry-exporter-otlp-proto-http
186
+ # opentelemetry-sdk
187
+ # opentelemetry-semantic-conventions
188
+ opentelemetry-exporter-otlp-proto-common==1.39.1
189
+ # via opentelemetry-exporter-otlp-proto-http
190
+ opentelemetry-exporter-otlp-proto-http==1.39.1
191
+ # via langfuse
192
+ opentelemetry-proto==1.39.1
193
+ # via
194
+ # opentelemetry-exporter-otlp-proto-common
195
+ # opentelemetry-exporter-otlp-proto-http
196
+ opentelemetry-sdk==1.39.1
197
+ # via
198
+ # langfuse
199
+ # opentelemetry-exporter-otlp-proto-http
200
+ opentelemetry-semantic-conventions==0.60b1
201
+ # via opentelemetry-sdk
202
  orjson==3.11.7
203
  # via
204
  # langgraph-sdk
 
225
  # via
226
  # aiohttp
227
  # yarl
228
+ protobuf==6.33.5
229
+ # via
230
+ # googleapis-common-protos
231
+ # opentelemetry-proto
232
  psycopg2==2.9.11
233
  # via service-explorer-backend (pyproject.toml)
234
  pyasn1==0.6.2
 
289
  requests==2.32.5
290
  # via
291
  # azure-core
292
+ # langfuse
293
  # langsmith
294
  # msal
295
+ # opentelemetry-exporter-otlp-proto-http
296
  # requests-toolbelt
297
  # tiktoken
298
  requests-toolbelt==1.0.0
 
343
  # fastapi
344
  # langchain-core
345
  # openai
346
+ # opentelemetry-api
347
+ # opentelemetry-exporter-otlp-proto-http
348
+ # opentelemetry-sdk
349
+ # opentelemetry-semantic-conventions
350
  # pydantic
351
  # pydantic-core
352
  # pydantic-extra-types
 
387
  # langsmith
388
  yarl==1.22.0
389
  # via aiohttp
390
+ zipp==3.23.0
391
+ # via importlib-metadata
392
  zstandard==0.25.0
393
  # via langsmith
services/base/BaseGenerator.py CHANGED
@@ -9,76 +9,71 @@ from tenacity import (
9
  )
10
  from typing import Dict
11
 
12
- from externals.observability.langfuse import langfuse_handler, langfuse
13
  from services.llms.LLM import model_5mini, model_4omini
14
  from utils.decorator import trace_runtime
15
  from utils.logger import get_logger
16
 
17
  logger = get_logger("base generator")
18
 
 
19
  class MetadataObservability(BaseModel):
20
  fullname: str
21
  task_id: str
22
  agent: str
23
 
 
24
  class BaseAIGenerator:
25
- """
26
- Args:
27
- name:str,
28
- prompt: ChatPromptTemplate,
29
- input_llm: Dict,
30
- metadata_observability: MetadataObservability,
31
- output_model: BaseModel,
32
- llm:AzureChatOpenAI = model_5mini | model_4omini,
33
- """
34
- def __init__(self,
35
- task_name:str,
36
  prompt: ChatPromptTemplate,
37
  input_llm: Dict,
38
  metadata_observability: MetadataObservability,
39
- llm:AzureChatOpenAI = model_5mini | model_4omini,
40
  ):
41
  self.name = task_name
42
- self.llm = llm
43
  self.prompt = prompt
44
  self.input_llm = input_llm
45
  self.metadata_observability = metadata_observability
46
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  @retry(
48
  reraise=True,
49
- stop=stop_after_attempt(2), # retry max 3 times
50
- wait=wait_exponential(multiplier=1, min=1, max=5),
51
- retry=retry_if_exception_type(Exception) # retry on any exception from LLM
52
  )
53
  async def _asafe_invoke(self, chain, input_llm, config):
54
- """private helper for retries"""
55
  return await chain.ainvoke(input_llm, config=config)
56
-
57
  @retry(
58
  reraise=True,
59
- stop=stop_after_attempt(2), # retry max 3 times
60
- wait=wait_exponential(multiplier=1, min=1, max=5),
61
- retry=retry_if_exception_type(Exception) # retry on any exception from LLM
62
  )
63
  async def _safe_invoke(self, chain, input_llm, config):
64
- """private helper for retries"""
65
  return chain.invoke(input_llm, config=config)
66
-
67
  @trace_runtime
68
  async def agenerate(self):
69
- trace = None
70
  try:
71
- # Create trace (no context manager, no end())
72
- trace = langfuse.trace(
73
- name=self.name,
74
- input=self.input_llm,
75
- user_id=self.metadata_observability.fullname,
76
- session_id=self.metadata_observability.task_id,
77
- metadata=self.metadata_observability.model_dump(),
78
- )
79
-
80
- handler = trace.get_langchain_handler()
81
- config = {"callbacks": [handler]}
82
  chain = self.prompt | self.llm
83
 
84
  output = await self._asafe_invoke(
@@ -86,37 +81,17 @@ class BaseAIGenerator:
86
  input_llm=self.input_llm,
87
  config=config,
88
  )
89
-
90
- trace.update(output=output)
91
-
92
  return output
93
 
94
  except Exception as e:
95
  logger.exception("❌ BaseGenerator agenerate error")
96
-
97
- if trace:
98
- trace.update(
99
- status="error",
100
- error=str(e),
101
- )
102
-
103
  return None
104
 
105
  @trace_runtime
106
  async def generate(self):
107
- trace = None
108
  try:
109
-
110
- trace = langfuse.trace(
111
- name=self.name,
112
- input=self.input_llm,
113
- user_id=self.metadata_observability.fullname,
114
- session_id=self.metadata_observability.task_id,
115
- metadata=self.metadata_observability.model_dump(),
116
- )
117
-
118
- handler = trace.get_langchain_handler()
119
- config = {"callbacks": [handler]}
120
  chain = self.prompt | self.llm
121
 
122
  output = self._safe_invoke(
@@ -124,20 +99,8 @@ class BaseAIGenerator:
124
  input_llm=self.input_llm,
125
  config=config,
126
  )
127
-
128
- trace.update(output=output)
129
-
130
  return output
131
 
132
  except Exception as e:
133
  logger.exception("❌ BaseGenerator generate error")
134
-
135
- if trace:
136
- trace.update(
137
- status="error",
138
- error=str(e),
139
- )
140
-
141
- return None
142
-
143
-
 
9
  )
10
  from typing import Dict
11
 
12
+ # Module-level Langfuse import removed: the langchain CallbackHandler is now created lazily at call time.
13
  from services.llms.LLM import model_5mini, model_4omini
14
  from utils.decorator import trace_runtime
15
  from utils.logger import get_logger
16
 
17
  logger = get_logger("base generator")
18
 
19
+
20
  class MetadataObservability(BaseModel):
21
  fullname: str
22
  task_id: str
23
  agent: str
24
 
25
+
26
  class BaseAIGenerator:
27
+ def __init__(self,
28
+ task_name: str,
 
 
 
 
 
 
 
 
 
29
  prompt: ChatPromptTemplate,
30
  input_llm: Dict,
31
  metadata_observability: MetadataObservability,
32
+ llm: AzureChatOpenAI = model_5mini | model_4omini,
33
  ):
34
  self.name = task_name
35
+ self.llm = llm
36
  self.prompt = prompt
37
  self.input_llm = input_llm
38
  self.metadata_observability = metadata_observability
39
 
40
+ def _get_langfuse_handler(self):
41
+ try:
42
+ import os
43
+ from config.constant import LangfuseConstants
44
+ os.environ["LANGFUSE_PUBLIC_KEY"] = LangfuseConstants.PUBLIC_KEY
45
+ os.environ["LANGFUSE_SECRET_KEY"] = LangfuseConstants.SECRET_KEY
46
+ os.environ["LANGFUSE_HOST"] = LangfuseConstants.HOST or "https://us.cloud.langfuse.com"
47
+
48
+ from langfuse.langchain import CallbackHandler
49
+ return CallbackHandler()
50
+ except Exception as e:
51
+ logger.warning(f"⚠️ Langfuse unavailable, skipping observability: {e}")
52
+ return None
53
+
54
  @retry(
55
  reraise=True,
56
+ stop=stop_after_attempt(2),
57
+ wait=wait_exponential(multiplier=1, min=1, max=5),
58
+ retry=retry_if_exception_type(Exception)
59
  )
60
  async def _asafe_invoke(self, chain, input_llm, config):
 
61
  return await chain.ainvoke(input_llm, config=config)
62
+
63
  @retry(
64
  reraise=True,
65
+ stop=stop_after_attempt(2),
66
+ wait=wait_exponential(multiplier=1, min=1, max=5),
67
+ retry=retry_if_exception_type(Exception)
68
  )
69
  async def _safe_invoke(self, chain, input_llm, config):
 
70
  return chain.invoke(input_llm, config=config)
71
+
72
  @trace_runtime
73
  async def agenerate(self):
 
74
  try:
75
+ handler = self._get_langfuse_handler()
76
+ config = {"callbacks": [handler]} if handler else {}
 
 
 
 
 
 
 
 
 
77
  chain = self.prompt | self.llm
78
 
79
  output = await self._asafe_invoke(
 
81
  input_llm=self.input_llm,
82
  config=config,
83
  )
 
 
 
84
  return output
85
 
86
  except Exception as e:
87
  logger.exception("❌ BaseGenerator agenerate error")
 
 
 
 
 
 
 
88
  return None
89
 
90
  @trace_runtime
91
  async def generate(self):
 
92
  try:
93
+ handler = self._get_langfuse_handler()
94
+ config = {"callbacks": [handler]} if handler else {}
 
 
 
 
 
 
 
 
 
95
  chain = self.prompt | self.llm
96
 
97
  output = self._safe_invoke(
 
99
  input_llm=self.input_llm,
100
  config=config,
101
  )
 
 
 
102
  return output
103
 
104
  except Exception as e:
105
  logger.exception("❌ BaseGenerator generate error")
106
+ return None
 
 
 
 
 
 
 
 
 
utils/decorator.py CHANGED
@@ -44,9 +44,7 @@ def retry_db(
44
  delay: float = 2.0,
45
  backoff: float = 2.0,
46
  ) -> Callable:
47
-
48
  def decorator(func: Callable) -> Callable:
49
-
50
  @wraps(func)
51
  async def async_wrapper(*args, **kwargs) -> Any:
52
  current_delay = delay
@@ -58,12 +56,12 @@ def retry_db(
58
  except (
59
  OperationalError,
60
  InterfaceError,
 
61
  asyncpg.exceptions.PostgresConnectionError,
62
  asyncpg.exceptions.CannotConnectNowError,
63
  ConnectionError,
64
  TimeoutError,
65
  ) as e:
66
-
67
  if attempt == retries:
68
  raise
69
 
@@ -72,9 +70,16 @@ def retry_db(
72
  f"after {current_delay:.2f}s due to: {type(e).__name__}"
73
  )
74
 
 
 
 
 
 
 
 
 
75
  await asyncio.sleep(current_delay)
76
  current_delay *= backoff
77
 
78
  return async_wrapper
79
-
80
  return decorator
 
44
  delay: float = 2.0,
45
  backoff: float = 2.0,
46
  ) -> Callable:
 
47
  def decorator(func: Callable) -> Callable:
 
48
  @wraps(func)
49
  async def async_wrapper(*args, **kwargs) -> Any:
50
  current_delay = delay
 
56
  except (
57
  OperationalError,
58
  InterfaceError,
59
+ PendingRollbackError,  # session left mid-failed-transaction; retryable once rolled back (ensure this is imported from sqlalchemy.exc)
60
  asyncpg.exceptions.PostgresConnectionError,
61
  asyncpg.exceptions.CannotConnectNowError,
62
  ConnectionError,
63
  TimeoutError,
64
  ) as e:
 
65
  if attempt == retries:
66
  raise
67
 
 
70
  f"after {current_delay:.2f}s due to: {type(e).__name__}"
71
  )
72
 
73
+ # Roll back the broken session before retrying so the next attempt starts on a clean transaction
74
+ db = args[0] if args else kwargs.get("db")
75
+ if db is not None:
76
+ try:
77
+ await db.rollback()
78
+ except Exception:
79
+ pass  # best-effort: if rollback itself fails, proceed to the retry sleep anyway
80
+
81
  await asyncio.sleep(current_delay)
82
  current_delay *= backoff
83
 
84
  return async_wrapper
 
85
  return decorator
uv.lock CHANGED
@@ -678,6 +678,18 @@ wheels = [
678
  { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" },
679
  ]
680
 
 
 
 
 
 
 
 
 
 
 
 
 
681
  [[package]]
682
  name = "greenlet"
683
  version = "3.3.0"
@@ -777,6 +789,18 @@ wheels = [
777
  { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
778
  ]
779
 
 
 
 
 
 
 
 
 
 
 
 
 
780
  [[package]]
781
  name = "isodate"
782
  version = "0.7.2"
@@ -928,19 +952,23 @@ wheels = [
928
 
929
  [[package]]
930
  name = "langfuse"
931
- version = "2.33.0"
932
  source = { registry = "https://pypi.org/simple" }
933
  dependencies = [
934
  { name = "backoff" },
935
  { name = "httpx" },
936
- { name = "idna" },
 
 
 
937
  { name = "packaging" },
938
  { name = "pydantic" },
 
939
  { name = "wrapt" },
940
  ]
941
- sdist = { url = "https://files.pythonhosted.org/packages/04/09/98b6245b833beaf797cfc7ba97917dcc19f34209b8ff56a55a43a458420e/langfuse-2.33.0.tar.gz", hash = "sha256:3ca2ef8539a8f28cb80135f4b46b80d5585ce183f8e2035f318be296d09d7d88", size = 87438, upload-time = "2024-05-21T15:55:18.441Z" }
942
  wheels = [
943
- { url = "https://files.pythonhosted.org/packages/92/9e/112aa6b3a246cfe14d92323de7f3d59fb8b4cf7d8f1e1eebfa3d0e07e35f/langfuse-2.33.0-py3-none-any.whl", hash = "sha256:362e3078c5a891df0b7ba3c9ce82f046d1f0274eab3d55337e443fff526f18ad", size = 162440, upload-time = "2024-05-21T15:55:14.894Z" },
944
  ]
945
 
946
  [[package]]
@@ -1267,6 +1295,88 @@ wheels = [
1267
  { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" },
1268
  ]
1269
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1270
  [[package]]
1271
  name = "orjson"
1272
  version = "3.11.5"
@@ -1531,6 +1641,21 @@ wheels = [
1531
  { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" },
1532
  ]
1533
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1534
  [[package]]
1535
  name = "psycopg2"
1536
  version = "2.9.11"
@@ -2060,7 +2185,7 @@ requires-dist = [
2060
  { name = "frontend", specifier = ">=0.0.3" },
2061
  { name = "langchain", specifier = ">=1.2.0" },
2062
  { name = "langchain-openai", specifier = ">=1.1.6" },
2063
- { name = "langfuse", specifier = "==2.33.0" },
2064
  { name = "pandas", specifier = ">=2.3.3" },
2065
  { name = "passlib", specifier = "==1.7.4" },
2066
  { name = "pdf2image", specifier = ">=1.17.0" },
@@ -2592,6 +2717,15 @@ wheels = [
2592
  { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" },
2593
  ]
2594
 
 
 
 
 
 
 
 
 
 
2595
  [[package]]
2596
  name = "zstandard"
2597
  version = "0.25.0"
 
678
  { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" },
679
  ]
680
 
681
+ [[package]]
682
+ name = "googleapis-common-protos"
683
+ version = "1.72.0"
684
+ source = { registry = "https://pypi.org/simple" }
685
+ dependencies = [
686
+ { name = "protobuf" },
687
+ ]
688
+ sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" }
689
+ wheels = [
690
+ { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" },
691
+ ]
692
+
693
  [[package]]
694
  name = "greenlet"
695
  version = "3.3.0"
 
789
  { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
790
  ]
791
 
792
+ [[package]]
793
+ name = "importlib-metadata"
794
+ version = "8.7.1"
795
+ source = { registry = "https://pypi.org/simple" }
796
+ dependencies = [
797
+ { name = "zipp" },
798
+ ]
799
+ sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" }
800
+ wheels = [
801
+ { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" },
802
+ ]
803
+
804
  [[package]]
805
  name = "isodate"
806
  version = "0.7.2"
 
952
 
953
  [[package]]
954
  name = "langfuse"
955
+ version = "3.14.5"
956
  source = { registry = "https://pypi.org/simple" }
957
  dependencies = [
958
  { name = "backoff" },
959
  { name = "httpx" },
960
+ { name = "openai" },
961
+ { name = "opentelemetry-api" },
962
+ { name = "opentelemetry-exporter-otlp-proto-http" },
963
+ { name = "opentelemetry-sdk" },
964
  { name = "packaging" },
965
  { name = "pydantic" },
966
+ { name = "requests" },
967
  { name = "wrapt" },
968
  ]
969
+ sdist = { url = "https://files.pythonhosted.org/packages/ec/6b/7a945e8bc56cbf343b6f6171fd45870b0ea80ea38463b2db8dd5a9dc04a2/langfuse-3.14.5.tar.gz", hash = "sha256:2f543ec1540053d39b08a50ed5992caf1cd54d472a55cb8e5dcf6d4fcb7ff631", size = 235474, upload-time = "2026-02-23T10:42:47.721Z" }
970
  wheels = [
971
+ { url = "https://files.pythonhosted.org/packages/a3/a1/10f04224542d6a57073c4f339b6763836a0899c98966f1d4ffcf56d2cf61/langfuse-3.14.5-py3-none-any.whl", hash = "sha256:5054b1c705ec69bce2d7077ce7419727ac629159428da013790979ca9cae77d5", size = 421240, upload-time = "2026-02-23T10:42:46.085Z" },
972
  ]
973
 
974
  [[package]]
 
1295
  { url = "https://files.pythonhosted.org/packages/27/4b/7c1a00c2c3fbd004253937f7520f692a9650767aa73894d7a34f0d65d3f4/openai-2.14.0-py3-none-any.whl", hash = "sha256:7ea40aca4ffc4c4a776e77679021b47eec1160e341f42ae086ba949c9dcc9183", size = 1067558, upload-time = "2025-12-19T03:28:43.727Z" },
1296
  ]
1297
 
1298
+ [[package]]
1299
+ name = "opentelemetry-api"
1300
+ version = "1.39.1"
1301
+ source = { registry = "https://pypi.org/simple" }
1302
+ dependencies = [
1303
+ { name = "importlib-metadata" },
1304
+ { name = "typing-extensions" },
1305
+ ]
1306
+ sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" }
1307
+ wheels = [
1308
+ { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" },
1309
+ ]
1310
+
1311
+ [[package]]
1312
+ name = "opentelemetry-exporter-otlp-proto-common"
1313
+ version = "1.39.1"
1314
+ source = { registry = "https://pypi.org/simple" }
1315
+ dependencies = [
1316
+ { name = "opentelemetry-proto" },
1317
+ ]
1318
+ sdist = { url = "https://files.pythonhosted.org/packages/e9/9d/22d241b66f7bbde88a3bfa6847a351d2c46b84de23e71222c6aae25c7050/opentelemetry_exporter_otlp_proto_common-1.39.1.tar.gz", hash = "sha256:763370d4737a59741c89a67b50f9e39271639ee4afc999dadfe768541c027464", size = 20409, upload-time = "2025-12-11T13:32:40.885Z" }
1319
+ wheels = [
1320
+ { url = "https://files.pythonhosted.org/packages/8c/02/ffc3e143d89a27ac21fd557365b98bd0653b98de8a101151d5805b5d4c33/opentelemetry_exporter_otlp_proto_common-1.39.1-py3-none-any.whl", hash = "sha256:08f8a5862d64cc3435105686d0216c1365dc5701f86844a8cd56597d0c764fde", size = 18366, upload-time = "2025-12-11T13:32:20.2Z" },
1321
+ ]
1322
+
1323
+ [[package]]
1324
+ name = "opentelemetry-exporter-otlp-proto-http"
1325
+ version = "1.39.1"
1326
+ source = { registry = "https://pypi.org/simple" }
1327
+ dependencies = [
1328
+ { name = "googleapis-common-protos" },
1329
+ { name = "opentelemetry-api" },
1330
+ { name = "opentelemetry-exporter-otlp-proto-common" },
1331
+ { name = "opentelemetry-proto" },
1332
+ { name = "opentelemetry-sdk" },
1333
+ { name = "requests" },
1334
+ { name = "typing-extensions" },
1335
+ ]
1336
+ sdist = { url = "https://files.pythonhosted.org/packages/80/04/2a08fa9c0214ae38880df01e8bfae12b067ec0793446578575e5080d6545/opentelemetry_exporter_otlp_proto_http-1.39.1.tar.gz", hash = "sha256:31bdab9745c709ce90a49a0624c2bd445d31a28ba34275951a6a362d16a0b9cb", size = 17288, upload-time = "2025-12-11T13:32:42.029Z" }
1337
+ wheels = [
1338
+ { url = "https://files.pythonhosted.org/packages/95/f1/b27d3e2e003cd9a3592c43d099d2ed8d0a947c15281bf8463a256db0b46c/opentelemetry_exporter_otlp_proto_http-1.39.1-py3-none-any.whl", hash = "sha256:d9f5207183dd752a412c4cd564ca8875ececba13be6e9c6c370ffb752fd59985", size = 19641, upload-time = "2025-12-11T13:32:22.248Z" },
1339
+ ]
1340
+
1341
+ [[package]]
1342
+ name = "opentelemetry-proto"
1343
+ version = "1.39.1"
1344
+ source = { registry = "https://pypi.org/simple" }
1345
+ dependencies = [
1346
+ { name = "protobuf" },
1347
+ ]
1348
+ sdist = { url = "https://files.pythonhosted.org/packages/49/1d/f25d76d8260c156c40c97c9ed4511ec0f9ce353f8108ca6e7561f82a06b2/opentelemetry_proto-1.39.1.tar.gz", hash = "sha256:6c8e05144fc0d3ed4d22c2289c6b126e03bcd0e6a7da0f16cedd2e1c2772e2c8", size = 46152, upload-time = "2025-12-11T13:32:48.681Z" }
1349
+ wheels = [
1350
+ { url = "https://files.pythonhosted.org/packages/51/95/b40c96a7b5203005a0b03d8ce8cd212ff23f1793d5ba289c87a097571b18/opentelemetry_proto-1.39.1-py3-none-any.whl", hash = "sha256:22cdc78efd3b3765d09e68bfbd010d4fc254c9818afd0b6b423387d9dee46007", size = 72535, upload-time = "2025-12-11T13:32:33.866Z" },
1351
+ ]
1352
+
1353
+ [[package]]
1354
+ name = "opentelemetry-sdk"
1355
+ version = "1.39.1"
1356
+ source = { registry = "https://pypi.org/simple" }
1357
+ dependencies = [
1358
+ { name = "opentelemetry-api" },
1359
+ { name = "opentelemetry-semantic-conventions" },
1360
+ { name = "typing-extensions" },
1361
+ ]
1362
+ sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = "sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" }
1363
+ wheels = [
1364
+ { url = "https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = "sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" },
1365
+ ]
1366
+
1367
+ [[package]]
1368
+ name = "opentelemetry-semantic-conventions"
1369
+ version = "0.60b1"
1370
+ source = { registry = "https://pypi.org/simple" }
1371
+ dependencies = [
1372
+ { name = "opentelemetry-api" },
1373
+ { name = "typing-extensions" },
1374
+ ]
1375
+ sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" }
1376
+ wheels = [
1377
+ { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, upload-time = "2025-12-11T13:32:36.955Z" },
1378
+ ]
1379
+
1380
  [[package]]
1381
  name = "orjson"
1382
  version = "3.11.5"
 
1641
  { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" },
1642
  ]
1643
 
1644
+ [[package]]
1645
+ name = "protobuf"
1646
+ version = "6.33.5"
1647
+ source = { registry = "https://pypi.org/simple" }
1648
+ sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" }
1649
+ wheels = [
1650
+ { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" },
1651
+ { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" },
1652
+ { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" },
1653
+ { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" },
1654
+ { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" },
1655
+ { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" },
1656
+ { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" },
1657
+ ]
1658
+
1659
  [[package]]
1660
  name = "psycopg2"
1661
  version = "2.9.11"
 
2185
  { name = "frontend", specifier = ">=0.0.3" },
2186
  { name = "langchain", specifier = ">=1.2.0" },
2187
  { name = "langchain-openai", specifier = ">=1.1.6" },
2188
+ { name = "langfuse", specifier = ">=3.0.0" },
2189
  { name = "pandas", specifier = ">=2.3.3" },
2190
  { name = "passlib", specifier = "==1.7.4" },
2191
  { name = "pdf2image", specifier = ">=1.17.0" },
 
2717
  { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" },
2718
  ]
2719
 
2720
+ [[package]]
2721
+ name = "zipp"
2722
+ version = "3.23.0"
2723
+ source = { registry = "https://pypi.org/simple" }
2724
+ sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" }
2725
+ wheels = [
2726
+ { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" },
2727
+ ]
2728
+
2729
  [[package]]
2730
  name = "zstandard"
2731
  version = "0.25.0"