ErikH committed on
Commit
18e53c5
1 Parent(s): a9e1591

Update pages/bot.py

Files changed (1)
  1. pages/bot.py +20 -2
pages/bot.py CHANGED
@@ -100,13 +100,30 @@ def main():
 st.text(answer)
 st.text(type(answer))
 
+# Create the question-answering pipeline for German
+qa_pipeline = pipeline("question-answering", model="deutsche-telekom/bert-multi-english-german-squad2", tokenizer="deutsche-telekom/bert-multi-english-german-squad2")
+
+# Answer the question
+answer = qa_pipeline(question=question, context=context)
+
+# Print the answer
+st.text("Basisantwort:", answer["answer"])
+st.text(answer)
+
+
+generator = pipeline('text-generation', model = 'jphme/Llama-2-13b-chat-german')
+generator(answer, max_length = 30, num_return_sequences=3)
+st.text("Generierte Erweiterung:")
+st.text(generator)
+
+"""
 # IDEA: extend the retriever
-template = """Answer the question based only on the following context:
+template = Answer the question based only on the following context:
 
 {context}
 
 Question: {question}
-"""
+
 prompt = ChatPromptTemplate.from_template(template)
 model = AutoModel.from_pretrained("hkunlp/instructor-base")
 
@@ -124,6 +141,7 @@ def main():
 
 ausgabetext = chain.invoke(user_question)
 st.text(ausgabetext)
+"""
 
 
 
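
A note on the new QA block: st.text() takes a single string, so st.text("Basisantwort:", answer["answer"]) as committed passes one positional argument too many. A minimal runnable sketch of the same step, assuming the transformers pipeline import and using placeholder question/context strings in place of the values the app collects elsewhere in main():

import streamlit as st
from transformers import pipeline

# Placeholder inputs; in pages/bot.py these come from the surrounding app state.
question = "Wie hoch ist der Eiffelturm?"
context = "Der Eiffelturm in Paris ist 330 Meter hoch."

# Create the question-answering pipeline for German/English
qa_pipeline = pipeline(
    "question-answering",
    model="deutsche-telekom/bert-multi-english-german-squad2",
    tokenizer="deutsche-telekom/bert-multi-english-german-squad2",
)

# Answer the question; the result is a dict with answer, score, start, end
result = qa_pipeline(question=question, context=context)

# st.text takes one string, so label and answer are concatenated here
st.text("Basisantwort: " + result["answer"])
st.text(str(result))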
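
The follow-up generation step passes the QA result dict straight into the text-generation pipeline and then prints the pipeline object itself via st.text(generator), so the generated sequences are never shown. A sketch of the presumably intended flow, feeding the answer string in and printing each generated_text; do_sample=True is added here because num_return_sequences=3 requires sampling or beam search, and note that the 13B model realistically needs a GPU or quantization:

import streamlit as st
from transformers import pipeline

answer_text = "330 Meter"  # placeholder for result["answer"] from the QA step

# Text-generation pipeline with the German Llama-2 chat model from the commit
generator = pipeline("text-generation", model="jphme/Llama-2-13b-chat-german")

# num_return_sequences > 1 requires sampling (or beam search)
outputs = generator(answer_text, max_length=30, num_return_sequences=3, do_sample=True)

st.text("Generierte Erweiterung:")
for out in outputs:
    st.text(out["generated_text"])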
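
The block now wrapped in a docstring ("# IDEA: extend the retriever") pairs ChatPromptTemplate with AutoModel.from_pretrained("hkunlp/instructor-base"), but a bare AutoModel cannot sit in a LangChain chain, and instructor-base is an embedding model rather than a generator. Below is a hedged LCEL sketch of one way the idea could be wired, reusing the German Llama-2 model from this commit as the LLM; fetch_context and user_question are illustrative placeholders, and a real retriever built on instructor-base embeddings (e.g. HuggingFaceInstructEmbeddings plus a vector store) would replace fetch_context:

import streamlit as st
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_community.llms import HuggingFacePipeline

template = """Answer the question based only on the following context:

{context}

Question: {question}
"""
prompt = ChatPromptTemplate.from_template(template)

def fetch_context(question: str) -> str:
    # Placeholder retriever; in the app this would query a vector store
    # built with hkunlp/instructor-base embeddings.
    return "Der Eiffelturm in Paris ist 330 Meter hoch."

# Reuse the German Llama-2 chat model from this commit as the answering LLM
llm = HuggingFacePipeline.from_model_id(
    model_id="jphme/Llama-2-13b-chat-german",
    task="text-generation",
    pipeline_kwargs={"max_new_tokens": 128},
)

chain = (
    {"context": RunnableLambda(fetch_context), "question": RunnablePassthrough()}
    | prompt
    | llm
    | StrOutputParser()
)

user_question = "Wie hoch ist der Eiffelturm?"  # placeholder
ausgabetext = chain.invoke(user_question)
st.text(ausgabetext)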