import os

from flask import Flask, redirect, request, session, url_for
from authlib.integrations.flask_client import OAuth
from langchain.llms.huggingface_hub import HuggingFaceHub
from langchain.prompts import ChatPromptTemplate
from langchain.schema import StrOutputParser
from langchain.schema.runnable import Runnable
from langchain.schema.runnable.config import RunnableConfig
import chainlit as cl

# add_llm_provider and LangchainGenericProvider are used below but were never
# imported; these paths assume an older Chainlit release that still ships the
# playground API.
from chainlit.playground.config import add_llm_provider
from chainlit.playground.providers.langchain import LangchainGenericProvider

app = Flask(__name__)
app.secret_key = 'YourSecretKey' # Change this to a real secret key for production
# OAuth setup with Authlib
oauth = OAuth(app)
# Assuming environment variables are set for OAuth
oauth.register(
    name='oauth_provider',
    client_id=os.getenv("OAUTH_CLIENT_ID"),
    client_secret=os.getenv("OAUTH_CLIENT_SECRET"),
    authorize_url=os.getenv("OPENID_PROVIDER_URL") + '/authorize',
    access_token_url=os.getenv("OPENID_PROVIDER_URL") + '/token',
    client_kwargs={'scope': os.getenv("OAUTH_SCOPES").split(',')},
    redirect_uri=f"https://{os.getenv('SPACE_HOST')}/login/callback"
)
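
# These variables (OAUTH_CLIENT_ID, OAUTH_CLIENT_SECRET, OPENID_PROVIDER_URL,
# OAUTH_SCOPES, SPACE_HOST) are typically injected by the hosting environment,
# e.g. a Hugging Face Space with OAuth enabled; they must be set before startup,
# otherwise the string concatenations above will fail on None.
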
# Instantiate the LLM
llm = HuggingFaceHub(
    model_kwargs={"max_length": 500},
    repo_id="google/flan-t5-xxl",
    huggingfacehub_api_token=os.getenv("HUGGINGFACE_API_TOKEN"),
)
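
# The ChatPromptTemplate / StrOutputParser imports above are unused in this file.
# A minimal sketch (illustrative only, not part of the original flow) of how they
# could be combined with the LLM using LangChain's runnable syntax:
#
#   prompt = ChatPromptTemplate.from_template("Answer briefly: {question}")
#   chain = prompt | llm | StrOutputParser()
#   answer = chain.invoke({"question": "What is Flask?"})
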
# Initialize ChainLit with LLM
def initialize_chainlit():
    add_llm_provider(
        LangchainGenericProvider(
            id=llm._llm_type,
            name="HuggingFaceHub",
            llm=llm,
            is_chat=False,
        )
    )
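
# chainlit is imported as `cl` but never wired up in this file. A hypothetical
# message handler (illustrative sketch only) that answers chat messages with the
# LLM could look like:
#
#   @cl.on_message
#   async def on_message(message: cl.Message):
#       response = await cl.make_async(llm)(message.content)
#       await cl.Message(content=response).send()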

@app.route('/')
def home():
    return 'Home - <a href="/login">Login with OAuth Provider</a>'

@app.route('/login')
def login():
    redirect_uri = url_for('authorize', _external=True)
    return oauth.oauth_provider.authorize_redirect(redirect_uri)

@app.route('/login/callback')
def authorize():
    token = oauth.oauth_provider.authorize_access_token()
    # The token can be used to fetch user info, or skipped if it is not needed.
    # Here, initialize ChainLit (or perform other actions) for the authenticated user.
    initialize_chainlit()
    return 'Logged in and language model initialized. Proceed with operations.'
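
# Illustrative only: with Authlib, user details could be fetched from the
# provider's userinfo endpoint (assuming the provider exposes one and it is
# configured on the registered client), e.g.:
#
#   user = oauth.oauth_provider.userinfo(token=token)
#   session['user'] = user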

if __name__ == "__main__":
    app.run(debug=True)
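    # Note: on Hugging Face Spaces the server usually needs to listen on
    # 0.0.0.0 and the port configured for the Space (7860 by default), e.g.:
    #   app.run(host="0.0.0.0", port=7860)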