Kushwanth Chowday Kandala committed on
Commit
64523d8
1 Parent(s): e488916

add file upload and format

Files changed (1)
  1. app.py +16 -11
app.py CHANGED
@@ -26,10 +26,10 @@ import torch
 
 device = 'cuda' if torch.cuda.is_available() else 'cpu'
 
-if device != 'cuda':
-    st.text(f"you are using {device}. This is much slower than using "
-            "a CUDA-enabled GPU. If on colab you can chnage this by "
-            "clicking Runtime > change runtime type > GPU.")
+# if device != 'cuda':
+#     st.text(f"you are using {device}. This is much slower than using "
+#             "a CUDA-enabled GPU. If on colab you can chnage this by "
+#             "clicking Runtime > change runtime type > GPU.")
 
 model = SentenceTransformer("all-MiniLM-L6-v2", device=device)
 st.divider()
@@ -49,11 +49,11 @@ PINECONE_ENVIRONMENT=os.getenv("PINECONE_ENVIRONMENT")
 
 def connect_pinecone():
     pinecone = PineconeGRPC(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
-    st.code(pinecone)
-    st.divider()
-    st.text(pinecone.list_indexes().names())
-    st.divider()
-    st.text(f"Succesfully connected to the pinecone")
+    # st.code(pinecone)
+    # st.divider()
+    # st.text(pinecone.list_indexes().names())
+    # st.divider()
+    # st.text(f"Succesfully connected to the pinecone")
     return pinecone
 
 def get_pinecone_semantic_index(pinecone):
@@ -70,7 +70,7 @@ def get_pinecone_semantic_index(pinecone):
     )
     # now connect to index
    index = pinecone.Index(index_name)
-    st.text(f"Succesfully connected to the pinecone index")
+    # st.text(f"Succesfully connected to the pinecone index")
     return index
 
 def chat_actions():
@@ -99,6 +99,7 @@ def chat_actions():
     resdf = pd.DataFrame(data, columns=['TopRank', 'Score', 'Text'])
 
     with st.sidebar:
+        st.markdown("*:red[semantic search results]* with **:green[Retrieval Augmented Generation]** ***(RAG)***.")
         st.dataframe(resdf)
 
     for res in result['matches']:
@@ -133,4 +134,8 @@ for i in st.session_state["chat_history"]:
 # pinecone.init(api_key=PINECONE_API_KEY, environment=PINECONE_ENV)
 # st.text(pinecone)
 
-
+uploaded_files = st.file_uploader("Choose a CSV file", accept_multiple_files=True)
+for uploaded_file in uploaded_files:
+    bytes_data = uploaded_file.read()
+    st.write("filename:", uploaded_file.name)
+    st.write(bytes_data)
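
Note: the uploader added in this commit only echoes the raw bytes back to the page. A minimal follow-up sketch (not part of this commit) of how the uploaded CSV could instead be parsed with pandas and shown as a table; it assumes the same Streamlit app context, and the io/pandas imports may already exist in app.py:

import io
import pandas as pd
import streamlit as st

uploaded_files = st.file_uploader("Choose a CSV file", accept_multiple_files=True)
for uploaded_file in uploaded_files:
    # parse the uploaded bytes into a DataFrame instead of printing them raw
    df = pd.read_csv(io.BytesIO(uploaded_file.read()))
    st.write("filename:", uploaded_file.name)
    st.dataframe(df)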