CosmoAI committed on
Commit 1de9151
1 Parent(s): 545c6ae

Update app.py

Files changed (1)
  1. app.py +25 -25
app.py CHANGED
@@ -1,42 +1,42 @@
-# import google.generativeai as palm
+import google.generativeai as palm
 import streamlit as st
-# import os
+import os
 
-# # Set your API key
-# palm.configure(api_key = os.environ['PALM_KEY'])
+# Set your API key
+palm.configure(api_key = os.environ['PALM_KEY'])
 
-# # Select the PaLM 2 model
-# model = 'models/text-bison-001'
+# Select the PaLM 2 model
+model = 'models/text-bison-001'
 
-# # Generate text
-# if prompt := st.chat_input("Ask your query..."):
-#     enprom = f"""Act as bhagwan krishna and Answer the below provided input in context to Bhagwad Geeta. Use the verses and chapters sentences as references to your answer with suggestions
-#     coming from Bhagwad Geeta. Your answer to below input should only be in context to Bhagwad geeta.\nInput= {prompt}"""
-#     completion = palm.generate_text(model=model, prompt=enprom, temperature=0.5, max_output_tokens=800)
+# Generate text
+if prompt := st.chat_input("Ask your query..."):
+    enprom = f"""Act as bhagwan krishna and Answer the below provided input in context to Bhagwad Geeta. Use the verses and chapters sentences as references to your answer with suggestions
+    coming from Bhagwad Geeta. Your answer to below input should only be in context to Bhagwad geeta.\nInput= {prompt}"""
+    completion = palm.generate_text(model=model, prompt=enprom, temperature=0.5, max_output_tokens=800)
 
-# #     response = palm.chat(messages=["Hello."])
-# #     print(response.last) # 'Hello! What can I help you with?'
-# #     response.reply("Can you tell me a joke?")
+    # response = palm.chat(messages=["Hello."])
+    # print(response.last) # 'Hello! What can I help you with?'
+    # response.reply("Can you tell me a joke?")
 
-# # Print the generated text
-#     with st.chat_message("Assistant"):
-#         st.write(prompt)
-#         st.write(completion.result)
+    # Print the generated text
+    with st.chat_message("Assistant"):
+        st.write(prompt)
+        st.write(completion.result)
 
 
 
 
 
-from transformers import AutoTokenizer, AutoModelForCausalLM
+# from transformers import AutoTokenizer, AutoModelForCausalLM
 
-tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
-model = AutoModelForCausalLM.from_pretrained("google/gemma-7b")
+# tokenizer = AutoTokenizer.from_pretrained("google/gemma-7b")
+# model = AutoModelForCausalLM.from_pretrained("google/gemma-7b")
 
-input_text = "Write me a poem about Machine Learning."
-input_ids = tokenizer(input_text, return_tensors="pt")
+# input_text = "Write me a poem about Machine Learning."
+# input_ids = tokenizer(input_text, return_tensors="pt")
 
-outputs = model.generate(**input_ids)
-st.write(tokenizer.decode(outputs[0]))
+# outputs = model.generate(**input_ids)
+# st.write(tokenizer.decode(outputs[0]))
 
 
 
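For reference, a minimal sketch of the PaLM text-generation path this commit re-enables, runnable outside Streamlit. It assumes the legacy google.generativeai PaLM API (palm.configure, palm.generate_text, completion.result) that app.py uses, and that a valid key is exported as PALM_KEY; the prompt below is only a placeholder, not the app's prompt template.

import os
import google.generativeai as palm

# Authenticate with the same environment variable app.py reads.
palm.configure(api_key=os.environ["PALM_KEY"])

# Same model and generation settings as the committed code.
completion = palm.generate_text(
    model="models/text-bison-001",
    prompt="Summarise the core teaching of Bhagwad Geeta chapter 2 in two lines.",  # placeholder prompt
    temperature=0.5,
    max_output_tokens=800,
)

# `result` holds the generated text, or None if the request was filtered.
print(completion.result)

To exercise the chat UI itself, export PALM_KEY and run `streamlit run app.py`; st.chat_input and st.chat_message only render inside a running Streamlit session.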