Entz committed on
Commit
368a8f5
1 Parent(s): c515677

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -110
app.py DELETED
@@ -1,110 +0,0 @@
1
- import streamlit as st
2
- import pandas as pd
3
- import sqlite3
4
- from llama_index.core import StorageContext, load_index_from_storage
5
- from llama_index.llms.ollama import Ollama
6
- from llama_index.embeddings.huggingface import HuggingFaceEmbedding
7
- from llama_index.core import PromptTemplate
8
- import os
9
-
10
# App/schema version stamped onto every saved Q&A row (stored in the
# REAL `version` column below).
version = 2.2

# Initialize the SQLite3 database.
# NOTE(review): the connection and cursor are module-level globals shared by
# save_to_db()/fetch_from_db(); sqlite3 connections are tied to the creating
# thread by default — confirm this is safe under Streamlit's rerun model.
conn = sqlite3.connect('qa.db')
c = conn.cursor()
# Create the Q&A table on first run; includes the version column so answers
# can be traced back to the app version that produced them.
c.execute('CREATE TABLE IF NOT EXISTS qa (question TEXT, answer TEXT, version REAL)')
conn.commit()
18
-
19
# Read the LLM Model Description from a file
def read_description_from_file(file_path):
    """Return the entire contents of *file_path* as a string.

    Args:
        file_path: Path to a UTF-8 text file (e.g. the model-description
            or prompt-template files this app ships with).

    Returns:
        The file's full text.

    Raises:
        OSError: If the file cannot be opened or read.
    """
    # Pin the encoding: the platform-default encoding (e.g. cp1252 on
    # Windows) can corrupt or reject UTF-8 prompt/description files.
    with open(file_path, 'r', encoding='utf-8') as file:
        return file.read()
23
-
24
# Define the folder containing the saved index (persisted by a separate
# indexing step; this app only loads it).
INDEX_OUTPUT_PATH = "./output_index"

# Ensure the output directory exists — fail fast at startup rather than
# erroring on the first query.
if not os.path.exists(INDEX_OUTPUT_PATH):
    raise ValueError(f"Index directory {INDEX_OUTPUT_PATH} does not exist")

# Setup LLM and embedding model.
# NOTE(review): requires a local Ollama server with the "llama3" model
# pulled, and will download the HuggingFace embedding model on first use.
llm = Ollama(model="llama3", request_timeout=120.0)
embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-large-en-v1.5", trust_remote_code=True)

# To load the index later, set up the storage context.
# The same embedding model used at build time must be supplied here so
# query embeddings live in the same vector space as the stored index.
storage_context = StorageContext.from_defaults(persist_dir=INDEX_OUTPUT_PATH)
loaded_index = load_index_from_storage(embed_model=embed_model, storage_context=storage_context)

# Define a query engine (assuming it needs the LLM and embedding model).
# This module-level engine is shared by main() below.
query_engine = loaded_index.as_query_engine(llm=llm, embed_model=embed_model)

# Customise prompt template.
# Read the prompt template from a file so it can be edited without code changes.
qa_prompt_tmpl_str = read_description_from_file("tab2_pe.txt")
qa_prompt_tmpl = PromptTemplate(qa_prompt_tmpl_str)

# Swap the default text-QA template on the response synthesizer for ours.
query_engine.update_prompts(
    {"response_synthesizer:text_qa_template": qa_prompt_tmpl}
)
50
-
51
# Save the question and answer to the SQLite3 database
def save_to_db(question, answer, version):
    """Persist one Q&A pair, tagged with the app version, into the qa table."""
    row = (question, answer, version)
    # Parameterized insert via the shared module-level cursor.
    c.execute('INSERT INTO qa (question, answer, version) VALUES (?, ?, ?)', row)
    conn.commit()
55
-
56
# Fetch all data from the SQLite3 database
def fetch_from_db():
    """Return every stored row from the qa table as a list of tuples."""
    # cursor.execute() returns the cursor itself, so the call chains directly.
    return c.execute('SELECT * FROM qa').fetchall()
60
-
61
- def main():
62
- st.title("How Much Does Mistral 7B Model Know About Wandsworth Council?")
63
-
64
- tab1, tab2, tab3 = st.tabs(["LLM Model Description", "Ask a Question", "View Q&A History"])
65
-
66
- with tab1:
67
- st.subheader("LLM Model Description")
68
- description = read_description_from_file("tab1_intro.txt")
69
- st.write(description)
70
-
71
- with tab2:
72
- st.subheader("Ask a Question")
73
- question = st.text_input("Enter your question:")
74
- if st.button("Get Answer"):
75
- if question:
76
- try:
77
- response = query_engine.query(question)
78
-
79
- # Try to extract the generated text
80
- try:
81
- # Extract the text from the response object (assuming it has a `text` attribute or method)
82
- if hasattr(response, 'text'):
83
- answer = response.text
84
- else:
85
- answer = str(response)
86
-
87
- except AttributeError as e:
88
- st.error(f"Error extracting text from response: {e}")
89
- answer = "Sorry, could not generate an answer."
90
-
91
- st.write(f"**Answer:** {answer}")
92
-
93
- # Save question and answer to database
94
- save_to_db(question, answer, version)
95
- except Exception as e:
96
- st.error(f"An error occurred: {e}")
97
- else:
98
- st.warning("Please enter a question")
99
-
100
- with tab3:
101
- st.subheader("View Q&A History")
102
- qa_data = fetch_from_db()
103
- if qa_data:
104
- df = pd.DataFrame(qa_data, columns=["Question", "Answer", "Version"])
105
- st.dataframe(df)
106
- else:
107
- st.write("No data available")
108
-
109
# Script entry point: only launch the UI when run directly, not on import.
if __name__ == "__main__":
    main()