import requests
import streamlit as st
import utils
from streaming import StreamHandler

from langchain.chains import ConversationChain
from langchain.chat_models import ChatOllama
from langchain.memory import ConversationBufferMemory

# The imports below are needed only by the commented-out Hugging Face path
# in Basic.setup_chain(); they are kept here for reference:
# import torch
# from langchain.llms import HuggingFaceHub, HuggingFacePipeline
# from transformers import AutoConfig, AutoTokenizer, AutoModelForCausalLM, pipeline
 
st.set_page_config(page_title="Chatbot", page_icon="πŸ’¬")
st.header('Ebiz Chatbot')
st.write('Chat with a locally hosted LLM; use the sidebar to tune sampling parameters and upload a file.')
# st.write('[![view source code ](https://img.shields.io/badge/view_source_code-gray?logo=github)](https://github.com/shashankdeshpande/langchain-chatbot/blob/master/pages/1_%F0%9F%92%AC_basic_chatbot.py)')


# Sidebar: sampling controls and file upload
with st.sidebar:
    # Sampling parameters, sent to the server as form fields on submit
    ktop_val = st.slider('Top-k', 0.0, 1.0, step=0.1)
    ptop_val = st.slider('Top-p', 0.0, 1.0, step=0.1)
    temp_val = st.slider('Temperature', 0.0, 1.0, step=0.1)

    uploaded_file = st.file_uploader("Choose a file")
    submit_button = st.button("Submit")
 

    if submit_button:
        if uploaded_file is None:
            st.warning("Please choose a file before submitting.")
        else:
            # Show basic details of the uploaded file
            st.write("File Details:")
            st.write(f"Filename: {uploaded_file.name}")
            st.write(f"File Type: {uploaded_file.type}")
            st.write(f"File Size: {uploaded_file.size} bytes")

            # Send the file and sampling parameters to the local Flask server
            url = "http://127.0.0.1:5000/upload"
            files = {'file': (uploaded_file.name, uploaded_file, uploaded_file.type)}
            payload = {'ktopVal': ktop_val, 'ptopVal': ptop_val, 'tempVal': temp_val}
            response = requests.post(url, files=files, data=payload)

            # Show the server's response
            st.write("Server Response:")
            st.write(response.text)
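
# For reference, a minimal sketch of what the Flask endpoint above might look
# like. This is an assumption -- the real server code lives elsewhere; only
# the URL, the 'file' key, and the form-field names come from the request
# built above.
#
#   from flask import Flask, request
#
#   app = Flask(__name__)
#
#   @app.route('/upload', methods=['POST'])
#   def upload():
#       f = request.files['file']                # matches files={'file': ...}
#       top_k = float(request.form['ktopVal'])   # sampling params as form data
#       top_p = float(request.form['ptopVal'])
#       temperature = float(request.form['tempVal'])
#       f.save(f.filename)                       # hypothetical: persist upload
#       return f"received {f.filename} (k={top_k}, p={top_p}, T={temperature})"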


class Basic:

    def __init__(self):
        # OpenAI settings are kept for compatibility with utils, but the
        # chain built in setup_chain() uses a local Ollama model instead.
        utils.configure_openai_api_key()
        self.openai_model = "gpt-3.5-turbo"
    
    def setup_chain(self):
        # Alternative path (kept for reference, currently unused): load a
        # Hugging Face model locally and wrap it for LangChain.
        # model_name = 'stabilityai/stablelm-3b-4e1t'
        # config = AutoConfig.from_pretrained(model_name, trust_remote_code=True)
        # tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
        # model = AutoModelForCausalLM.from_pretrained(
        #     model_name,
        #     torch_dtype=torch.float16,
        #     load_in_8bit=False,
        #     device_map="auto",
        #     trust_remote_code=True,
        #     offload_folder="./offload",
        # )
        # pipe = pipeline(
        #     "text-generation", model=model, tokenizer=tokenizer, max_new_tokens=256
        # )
        # llm = HuggingFacePipeline(pipeline=pipe)

        # Active path: a local Llama 2 model served by Ollama, with buffer
        # memory so the conversation keeps its history.
        llm = ChatOllama(model="llama2")
        memory = ConversationBufferMemory()
        chain = ConversationChain(llm=llm, memory=memory, verbose=True)
        return chain
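
    # Usage note: the active path assumes a local Ollama server with the
    # llama2 model available (e.g. `ollama pull llama2`). A quick smoke test
    # outside Streamlit:
    #   chain = Basic().setup_chain()
    #   print(chain.run("Hello!"))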
    
    @utils.enable_chat_history
    def main(self):
        chain = self.setup_chain()
        user_query = st.chat_input(placeholder="Ask me anything!")
        if user_query:
            utils.display_msg(user_query, 'user')
            with st.chat_message("assistant"):
                # Stream tokens into the chat message as they are generated
                st_cb = StreamHandler(st.empty())
                response = chain(user_query, callbacks=[st_cb])
                st.session_state.messages.append(
                    {"role": "assistant", "content": response['response']}
                )

if __name__ == "__main__":
    obj = Basic()
    obj.main()
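
# For reference, a sketch of what the StreamHandler imported from streaming
# likely looks like -- an assumption based on the common LangChain/Streamlit
# streaming pattern: a callback that appends each new token to a placeholder.
#
#   from langchain.callbacks.base import BaseCallbackHandler
#
#   class StreamHandler(BaseCallbackHandler):
#       def __init__(self, container, initial_text=""):
#           self.container = container   # e.g. the st.empty() placeholder
#           self.text = initial_text
#
#       def on_llm_new_token(self, token: str, **kwargs) -> None:
#           self.text += token
#           self.container.markdown(self.text)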