import streamlit as st
import torch
from transformers import pipeline


with st.sidebar:
    st.image("https://www.onepointltd.com/wp-content/uploads/2020/03/inno2.png")
    st.title("Campus Comrade Assistant")
    choice = st.radio("Navigation", ["Home/About", "Meal Suggester", "Accommodation Available", "Assignment Assist", "Events"])
    st.info("This application helps you accomplish both major and minor campus tasks efficiently 😉.")

if choice == "Home/about":
    st.title("Welcome to Campus Comrade Assitant")
    st.write("In this app you can book an event, get balanced diet meal suggestions, find accomodation around you and so much more. welcome to your onestop comrade assist app🔥")
    
if choice == "Meal Suggester": 
    st.title("Time to Eat")
    st.write("Feeling Hungry? Let get you some nice meal to silence that grumbling. In a healthy way ofcourse")# Install transformers from source - only needed for versions <= v4.34
    # pip install git+https://github.com/huggingface/transformers.git
    # pip install accelerate
    
    pipe = pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-beta", torch_dtype=torch.bfloat16, device_map="auto")

    user_input = st.chat_input("Say something")
    if user_input:
        st.write(f"You said: {user_input}")

        # We use the tokenizer's chat template to format each message - see https://huggingface.co/docs/transformers/main/en/chat_templating
        messages = [
            {
                "role": "system",
                "content": "You are a friendly chatbot who always responds in the style of a pirate",
            },
            {"role": "user", "content": f"{user_input}"},
        ]
        prompt = pipe.tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
        outputs = pipe(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
        # outputs[0]["generated_text"] includes the formatted prompt by default; pass return_full_text=False to get only the reply
        st.write(outputs[0]["generated_text"])
        # <|system|>
    # You are a friendly chatbot who always responds in the style of a pirate.</s>
    # <|user|>
    # How many helicopters can a human eat in one sitting?</s>
    # <|assistant|>
    # Ah, me hearty matey! But yer question be a puzzler! A human cannot eat a helicopter in one sitting, as helicopters are not edible. They be made of metal, plastic, and other materials, not food!
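
    # A minimal sketch (not part of the original file): Streamlit reruns this script on every
    # interaction, so the pipeline above is reloaded each time. Assuming Streamlit >= 1.18,
    # st.cache_resource could keep a single pipeline instance alive across reruns; the
    # load_zephyr_pipeline name below is hypothetical:
    #
    #     @st.cache_resource
    #     def load_zephyr_pipeline():
    #         # Load once and reuse; device_map="auto" places weights on the available GPU/CPU.
    #         return pipeline("text-generation", model="HuggingFaceH4/zephyr-7b-beta",
    #                         torch_dtype=torch.bfloat16, device_map="auto")
    #
    #     pipe = load_zephyr_pipeline()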


if choice == "Accomodation available":
    st.title("Ready to Move in?")
    st.write("Find Best value rooms around you. Live in a cozy and quiet environment, or a Lively bustling Street. You decide")

if choice == "Assignment assist": 
    st.title("Let's complete that assignment")
    
if choice == "Events": 
    st.title("Collaborate and Jazz. Events around You")