XThomasBU committed on
Commit d987823 · verified · 1 Parent(s): 227a2f5

Update app.py

Files changed (1)
  1. app.py +47 -72
app.py CHANGED
@@ -1,89 +1,64 @@
+ from flask import Flask, redirect, request, session, url_for
  import os
- 
+ from authlib.integrations.flask_client import OAuth
  from langchain.llms.huggingface_hub import HuggingFaceHub
  from langchain.prompts import ChatPromptTemplate
  from langchain.schema import StrOutputParser
  from langchain.schema.runnable import Runnable
  from langchain.schema.runnable.config import RunnableConfig
- 
- from chainlit.playground.config import add_llm_provider
- from chainlit.playground.providers.langchain import LangchainGenericProvider
  import chainlit as cl
  
- from authlib.integrations.requests_client import OAuth2Session
- import os
- 
- # Retrieving environment variables
- OAUTH_CLIENT_ID = os.getenv("OAUTH_CLIENT_ID")
- OAUTH_CLIENT_SECRET = os.getenv("OAUTH_CLIENT_SECRET")
- OAUTH_SCOPES = os.getenv("OAUTH_SCOPES").split(',')  # Assuming OAUTH_SCOPES is a comma-separated list
- OPENID_PROVIDER_URL = os.getenv("OPENID_PROVIDER_URL")
- SPACE_HOST = os.getenv("SPACE_HOST")
- 
- # Constructing the redirect URL using the SPACE_HOST variable
- redirect_uri = f"https://{SPACE_HOST}/login/callback"
- 
- # Initializing the OAuth client/session with the retrieved environment variables
- oauth_client = OAuth2Session(client_id=OAUTH_CLIENT_ID,
-                              client_secret=OAUTH_CLIENT_SECRET,  # Include client_secret if needed for the OAuth2Session setup
-                              scope=OAUTH_SCOPES,
-                              redirect_uri=redirect_uri)
- 
- # Use the corrected method to generate the authorization URL
- authorization_url, state = oauth_client.create_authorization_url(OPENID_PROVIDER_URL + '/authorize')
- 
- print(authorization_url, state)
- # The rest of your OAuth flow would go here, including redirecting the user to the authorization_url,
- # and then handling the redirect back to your application to exchange the code for a token.
- 
+ app = Flask(__name__)
+ app.secret_key = 'YourSecretKey'  # Change this to a real secret key for production
+ 
+ # OAuth setup with Authlib
+ oauth = OAuth(app)
+ 
+ # Assuming environment variables are set for OAuth
+ oauth.register(
+     name='oauth_provider',
+     client_id=os.getenv("OAUTH_CLIENT_ID"),
+     client_secret=os.getenv("OAUTH_CLIENT_SECRET"),
+     authorize_url=os.getenv("OPENID_PROVIDER_URL") + '/authorize',
+     access_token_url=os.getenv("OPENID_PROVIDER_URL") + '/token',
+     client_kwargs={'scope': os.getenv("OAUTH_SCOPES").split(',')},
+     redirect_uri=f"https://{os.getenv('SPACE_HOST')}/login/callback"
+ )
  
  # Instantiate the LLM
  llm = HuggingFaceHub(
      model_kwargs={"max_length": 500},
      repo_id="google/flan-t5-xxl",
-     huggingfacehub_api_token=os.environ["HUGGINGFACE_API_TOKEN"],
+     huggingfacehub_api_token=os.getenv("HUGGINGFACE_API_TOKEN"),
  )
  
- # Add the LLM provider
- add_llm_provider(
-     LangchainGenericProvider(
-         # It is important that the id of the provider matches the _llm_type
-         id=llm._llm_type,
-         # The name is not important. It will be displayed in the UI.
-         name="HuggingFaceHub",
-         # This should always be a Langchain llm instance (correctly configured)
-         llm=llm,
-         # If the LLM works with messages, set this to True
-         is_chat=False,
+ # Initialize ChainLit with LLM
+ def initialize_chainlit():
+     add_llm_provider(
+         LangchainGenericProvider(
+             id=llm._llm_type,
+             name="HuggingFaceHub",
+             llm=llm,
+             is_chat=False,
+         )
      )
- )
- 
- from typing import Dict, Optional
- import chainlit as cl
- 
- 
- @cl.on_chat_start
- async def on_chat_start():
- 
-     prompt = ChatPromptTemplate.from_messages(
-         [
-             ("human", "{question}"),
-         ]
-     )
-     runnable = prompt | llm | StrOutputParser()
-     cl.user_session.set("runnable", runnable)
- 
- 
- @cl.on_message
- async def on_message(message: cl.Message):
-     runnable = cl.user_session.get("runnable")  # type: Runnable
- 
-     msg = cl.Message(content="")
- 
-     async for chunk in runnable.astream(
-         {"question": message.content},
-         config=RunnableConfig(callbacks=[cl.LangchainCallbackHandler()]),
-     ):
-         await msg.stream_token(chunk)
  
-     await msg.send()
+ @app.route('/')
+ def home():
+     return 'Home - <a href="/login">Login with OAuth Provider</a>'
+ 
+ @app.route('/login')
+ def login():
+     redirect_uri = url_for('authorize', _external=True)
+     return oauth.oauth_provider.authorize_redirect(redirect_uri)
+ 
+ @app.route('/login/callback')
+ def authorize():
+     token = oauth.oauth_provider.authorize_access_token()
+     # You can use token to fetch user info or proceed directly if not needed
+     # Here, initialize ChainLit or perform actions based on the authenticated user
+     initialize_chainlit()
+     return 'Logged in and language model initialized. Proceed with operations.'
+ 
+ if __name__ == "__main__":
+     app.run(debug=True)
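
One point worth noting about the new revision: initialize_chainlit() still calls add_llm_provider and LangchainGenericProvider, but the commit removes the chainlit.playground imports that supplied those names, so the callback would raise a NameError when it runs. A minimal sketch of the imports the new app.py would still need, carried over from the previous revision (whether chainlit.playground is importable depends on the installed Chainlit version; this is not part of the commit):

# Sketch, not part of the commit: imports that initialize_chainlit() still
# relies on, taken from the previous revision of app.py.
from chainlit.playground.config import add_llm_provider
from chainlit.playground.providers.langchain import LangchainGenericProvider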
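
The updated file also dereferences several environment variables at import time (OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OAUTH_SCOPES, OPENID_PROVIDER_URL, SPACE_HOST, HUGGINGFACE_API_TOKEN); for example, os.getenv("OAUTH_SCOPES").split(',') and the URL concatenations fail with an AttributeError or TypeError if a variable is unset. A small, hypothetical fail-fast guard that could sit near the top of app.py (the variable names come from the diff; the guard itself is not part of the commit):

import os

# Hypothetical startup guard, not in the commit: verify the variables the
# new revision reads before the Flask/Authlib setup runs.
REQUIRED_ENV_VARS = [
    "OAUTH_CLIENT_ID",
    "OAUTH_CLIENT_SECRET",
    "OAUTH_SCOPES",
    "OPENID_PROVIDER_URL",
    "SPACE_HOST",
    "HUGGINGFACE_API_TOKEN",
]

missing = [name for name in REQUIRED_ENV_VARS if not os.getenv(name)]
if missing:
    raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")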