File size: 2,762 Bytes
4804944
cec6273
4804944
 
 
 
 
 
 
56a4ec8
d987823
 
 
4804944
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
be91379
d987823
 
4804944
a15b13b
4804944
 
 
 
a15b13b
4804944
 
 
 
 
 
 
a15b13b
4804944
 
a15b13b
d987823
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
from flask import Flask, redirect, request, session, url_for
import os
from authlib.integrations.flask_client import OAuth
from langchain.llms.huggingface_hub import HuggingFaceHub
from langchain.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig
import chainlit as cl

app = Flask(__name__)
# Session-cookie signing key. Prefer an env override so the placeholder
# default never reaches production; falls back to the old literal so
# existing local setups keep working.
app.secret_key = os.getenv("FLASK_SECRET_KEY", 'YourSecretKey')

# OAuth setup with Authlib
oauth = OAuth(app)

# Fail fast with a clear message instead of the opaque
# `TypeError: unsupported operand ... NoneType + str` the original raised
# when the provider URL env var was missing.
_provider_url = os.getenv("OPENID_PROVIDER_URL")
if _provider_url is None:
    raise RuntimeError("OPENID_PROVIDER_URL environment variable is not set")

# BUG FIX: Authlib expects `scope` as a single space-delimited string
# (RFC 6749 §3.3), not a Python list. The env var is comma-delimited,
# so join the pieces with spaces. Defaults to "" when unset instead of
# crashing on `None.split`.
_scopes = " ".join(os.getenv("OAUTH_SCOPES", "").split(','))

oauth.register(
    name='oauth_provider',
    client_id=os.getenv("OAUTH_CLIENT_ID"),
    client_secret=os.getenv("OAUTH_CLIENT_SECRET"),
    authorize_url=_provider_url + '/authorize',
    access_token_url=_provider_url + '/token',
    client_kwargs={'scope': _scopes},
    redirect_uri=f"https://{os.getenv('SPACE_HOST')}/login/callback"
)

print(f"REDIRECT URI: https://{os.getenv('SPACE_HOST')}/login/callback")

# Instantiate the LLM once at module load; it is shared by every chat session.
_hf_token = os.getenv("HUGGINGFACE_API_TOKEN")
llm = HuggingFaceHub(
    repo_id="google/flan-t5-xxl",
    huggingfacehub_api_token=_hf_token,
    model_kwargs={"max_length": 500},
)

# Initialize ChainLit with LLM
def initialize_chainlit():
    """Register the HuggingFaceHub LLM with Chainlit's playground.

    BUG FIX: `add_llm_provider` and `LangchainGenericProvider` were never
    imported anywhere in this file, so the original raised NameError the
    moment this function ran. Import them lazily here so the module still
    loads even if the installed Chainlit version lacks the playground API.
    """
    # NOTE(review): import paths follow the Chainlit ~1.x playground API —
    # confirm against the pinned chainlit version in requirements.
    from chainlit.playground.config import add_llm_provider
    from chainlit.playground.providers.langchain import LangchainGenericProvider

    add_llm_provider(
        LangchainGenericProvider(
            id=llm._llm_type,
            name="HuggingFaceHub",
            llm=llm,
            is_chat=False,  # flan-t5 is a text-completion model, not a chat model
        )
    )

# Setup chainlit callbacks
@cl.on_chat_start
async def on_chat_start():
    """Build the per-session chain: prompt -> LLM -> plain-string output."""
    chain = (
        ChatPromptTemplate.from_messages([("human", "{question}")])
        | llm
        | StrOutputParser()
    )
    cl.user_session.set("runnable", chain)

@cl.on_message
async def on_message(message: cl.Message):
    """Stream the LLM answer for an incoming chat message, token by token."""
    chain: Runnable = cl.user_session.get("runnable")
    reply = cl.Message(content="")
    cfg = RunnableConfig(callbacks=[cl.LangchainCallbackHandler()])
    async for token in chain.astream({"question": message.content}, config=cfg):
        await reply.stream_token(token)
    await reply.send()

@app.route('/')
def home():
    """Landing page with a link that starts the OAuth login flow."""
    login_link = '<a href="/login">Login with OAuth Provider</a>'
    return 'Home - ' + login_link

@app.route('/login')
def login():
    """Kick off the OAuth flow by redirecting to the provider's consent page."""
    callback = url_for('authorize', _external=True)
    return oauth.oauth_provider.authorize_redirect(callback)

@app.route('/login/callback')
def authorize():
    """OAuth callback: exchange the auth code for a token, then set up Chainlit.

    BUG FIX: the original printed the success message *before* the token
    exchange ran, so failed logins were logged as successes; it also
    discarded the token it fetched. Exchange first, keep the token in the
    session for later use, then report success.
    """
    token = oauth.oauth_provider.authorize_access_token()
    session['oauth_token'] = token  # retained for subsequent API calls
    # Initialize ChainLit or perform actions based on the authenticated user
    initialize_chainlit()
    print('Logged in and language model initialized. Proceed with operations.')
    return 'Logged in and language model initialized. Proceed with operations.'

if __name__ == "__main__":
    # NOTE(review): debug=True enables the werkzeug interactive debugger and
    # auto-reloader — acceptable for local development only; must be off in
    # any deployed environment.
    app.run(debug=True)