TheoLvs committed
Commit f842a0e
1 Parent(s): 9a9100e

Update app.py

Files changed (1): app.py (+3, -23)
app.py CHANGED
@@ -7,35 +7,24 @@ import pandas as pd
  import numpy as np
  import os
  import time
+ import re
+ import json
  from datetime import datetime
+ from azure.storage.fileshare import ShareServiceClient

  from utils import create_user_id

- from azure.storage.fileshare import ShareServiceClient


- import re
- import json
- print("1")
  # ClimateQ&A imports
  from climateqa.engine.llm import get_llm
- print("2")
- # from climateqa.chains import load_qa_chain_with_docs,load_qa_chain_with_text
- # from climateqa.chains import load_reformulation_chain
  from climateqa.engine.rag import make_rag_chain
- print("3")
  from climateqa.engine.vectorstore import get_pinecone_vectorstore
- print("4")
  from climateqa.engine.retriever import ClimateQARetriever
- print("5")
  from climateqa.engine.embeddings import get_embeddings_function
- print("6")
  from climateqa.engine.prompts import audience_prompts
- print("7")
  from climateqa.sample_questions import QUESTIONS
- print("8")
  from climateqa.constants import POSSIBLE_REPORTS
- print("9")
  from climateqa.utils import get_image_from_azure_blob_storage

  # Load environment variables in local mode
@@ -52,8 +41,6 @@ theme = gr.themes.Base(
      font=[gr.themes.GoogleFont("Poppins"), "ui-sans-serif", "system-ui", "sans-serif"],
  )

- print("1")
-


  init_prompt = ""
@@ -97,13 +84,6 @@ def parse_output_llm_with_sources(output):
      return content_parts


-
- # Create embeddings function and LLM
- print("1")
- embeddings_function = get_embeddings_function()
- print("1")
-
-
  # Create vectorstore and retriever
  vectorstore = get_pinecone_vectorstore(embeddings_function)

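Note that the last hunk deletes the in-place creation of embeddings_function (together with the surrounding debug prints), while the retained context line vectorstore = get_pinecone_vectorstore(embeddings_function) still depends on it, so that object is presumably now created in a part of app.py outside the hunks shown here. For reference, a minimal sketch of the embeddings/vectorstore setup implied by this diff, using only the calls that appear above (the placement and surrounding code are assumptions, not taken from this commit):

# Sketch only: build the embeddings function, then the Pinecone vectorstore,
# using the same calls that appear in the diff; where this lives in app.py
# after the commit is not shown in these hunks.
from climateqa.engine.embeddings import get_embeddings_function
from climateqa.engine.vectorstore import get_pinecone_vectorstore

embeddings_function = get_embeddings_function()
vectorstore = get_pinecone_vectorstore(embeddings_function)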