import logging
import sys

import chainlit as cl
from dotenv import find_dotenv, load_dotenv

# Load environment variables (e.g. API keys) from a local .env file.
load_dotenv(find_dotenv())

# Send log output to stdout so it is visible in the Chainlit console.
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))

import llama_index
from llama_index.core import set_global_handler

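# set_global_handler is imported but not configured in this snippet. To trace
# LLM inputs/outputs during development, one option (illustrative, not required
# by the rest of this file) is LlamaIndex's built-in "simple" handler:
#   set_global_handler("simple")
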
# Default starter questions and model handles defined in this package's globals.
from .globals import (
    DEFAULT_QUESTION1,
    DEFAULT_QUESTION2,
    gpt35_model,
    gpt4_model,
)


@cl.on_message
async def main(message: cl.Message):
    # For now, simply echo the user's message back to the chat.
    await cl.Message(
        content=f"Received: {message.content}",
    ).send()


@cl.on_chat_start
async def start():
    # Greet the user when a new chat session starts.
    await cl.Message(
        content="How can I help you with Meta's 2023 10-K?"
    ).send()
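

# To launch the chat UI locally, run Chainlit against this module (the filename
# app.py is an assumption; substitute the actual filename):
#   chainlit run app.py -w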