from ctransformers import AutoModelForCausalLM
import gradio as gr

greety = """
As a derivate work of [Linkedin Automation System](https://medium.com/@gathnex) by Gathnex,
Follow us on [linkedin](https://www.linkedin.com/company/gathnex/) and [Github](https://github.com/gathnexadmin). A special thanks to the Gathnex team members who made a significant contribution to this project.
"""

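# Load the quantized Zephyr-7B GGUF weights for local CPU inference via ctransformers.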
llm = AutoModelForCausalLM.from_pretrained(
    "zephyr-7b-beta.Q4_K_S.gguf",
    model_type="mistral",
    max_new_tokens=1096,
    threads=3,
)

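# Streaming generator used by gr.ChatInterface: builds the prompt and yields the growing reply.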
def stream(user_prompt, history):
    # Zephyr-style prompt: system instruction and user turn, each terminated by </s>.
    system_prompt = 'Below is an instruction that describes a task. Write a response that appropriately completes the request.'
    E_INST = "</s>"
    user, assistant = "<|user|>", "<|assistant|>"
    prompt = f"{system_prompt}{E_INST}\n{user}\n{user_prompt.strip()}{E_INST}\n{assistant}\n"
    # Accumulate tokens and yield the partial text so the chat UI streams the response.
    response = ""
    for text in llm(prompt, stream=True, threads=3):
        response += text
        yield response

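# Custom CSS: center the title, style the duplicate button, and constrain the page width.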
css = """
h1 {
  text-align: center;
}
#duplicate-button {
  margin: auto;
  color: white;
  background: #1565c0;
  border-radius: 100vh;
}
.contain {
  max-width: 900px;
  margin: auto;
  padding-top: 1.5rem;
}
"""

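# Chat UI backed by the stream() generator; the extra credential inputs are left commented out.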
chat_interface = gr.ChatInterface(
    fn=stream,
    additional_inputs_accordion_name="Credentials",
    # additional_inputs=[
    #     gr.Textbox(label="OpenAI Key", lines=1),
    #     gr.Textbox(label="Linkedin Access Token", lines=1),
    # ],
    stop_btn=None,
    examples=[
        ["explain Large language model"],
        ["what is quantum computing"],
    ],
)

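# Assemble the page: title, duplicate button, chat interface, and credits footer.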
with gr.Blocks(css=css) as demo:
    gr.HTML("<h1><center>Gathnex Linkedin Automation using Generative AI</center></h1>")
    gr.HTML("<h3><center><a href='https://medium.com/@gathnex'>Gathnex AI</a>💬</center></h3>")
    gr.DuplicateButton(value="Duplicate Space for private use", elem_id="duplicate-button")
    chat_interface.render()
    gr.Markdown(greeting)
    
if __name__ == "__main__":
    demo.queue(max_size=10).launch()