# Streamlit app: Food Security in Africa and Asia.
# (Removed "Spaces: / Sleeping" lines — Hugging Face Spaces page chrome
#  accidentally captured when the source was copied, not Python code.)
import streamlit as st
import torch
from huggingface_hub import HfApi
from transformers import (
    AutoModelForCausalLM,
    AutoModelForQuestionAnswering,
    AutoTokenizer,
    pipeline,
)
# Sidebar menu: each section is toggled independently via its own checkbox,
# so any combination of sections can be shown at once.
st.sidebar.title('Menu')
home = st.sidebar.checkbox("Home")
time_series = st.sidebar.checkbox('Time Series Data')
chatbot = st.sidebar.checkbox('Chatbot')

if home:
    st.title("Food Security in Africa and Asia")
    st.text(
        "Hi there! I'm your food security assistant. Food security means everyone "
        "has access to safe, nutritious food to meet their dietary needs.\n"
        "Want to learn more about food insecurity, its causes, or potential solutions?"
    )

if time_series:
    st.header("Time series data from 2000 to 2022")
    # Typos fixed in the displayed text: "depict metrcis" -> "depicts metrics",
    # "produduced" -> "produced".
    st.text(
        "This data was collected from trusted organizations and depicts metrics "
        "on food security based on climate change and food produced"
    )
if chatbot:
    st.header("Chat with me.")
    text = st.text_area(
        "Food security is a global challenge. Let's work together to find solutions. "
        "How can I help you today?"
    )

    @st.cache_resource
    def _load_model():
        """Load tokenizer and model once per process.

        Streamlit re-runs the whole script on every widget interaction;
        without caching, the 9B-parameter model would be re-downloaded and
        re-instantiated on each rerun.
        """
        tokenizer = AutoTokenizer.from_pretrained("google/gemma-2-9b-it")
        # Bug fix: gemma-2-9b-it is a causal (generative) language model.
        # AutoModelForQuestionAnswering builds an extractive-QA head, for which
        # generate() is not meaningful — use AutoModelForCausalLM instead.
        model = AutoModelForCausalLM.from_pretrained(
            "google/gemma-2-9b-it",
            device_map="auto",
            torch_dtype=torch.bfloat16,
        )
        return tokenizer, model

    if text:
        tokenizer, model = _load_model()
        # Move inputs to wherever device_map="auto" placed the model instead of
        # hard-coding "cuda" (which crashes on CPU-only hosts).
        inputs = tokenizer(text, return_tensors="pt").to(model.device)
        outputs = model.generate(**inputs)
        st.write(tokenizer.decode(outputs[0], skip_special_tokens=True))
# NOTE(review): removed a dead, commented-out (triple-quoted) alternative that
# built a transformers question-answering pipeline for the chatbot section.
# It duplicated the active chatbot branch above, was never executed, and its
# copy-paste garbling (a stray "|" after the closing quotes) made the file
# a syntax error. Recover it from version control if needed.