# teslaalerts/app.py
import streamlit as st
from llama_index import (
    ServiceContext,
    SimpleDirectoryReader,
    StorageContext,
    VectorStoreIndex,
    download_loader,
    load_index_from_storage,
)
from llama_index.llms import HuggingFaceLLM
from llama_index.embeddings import HuggingFaceEmbedding
import torch
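# Allocate torch tensors on the GPU by default (assumes a CUDA device is available).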
torch.set_default_device('cuda')
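# Streamlit page configuration and header.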
st.set_page_config(page_title="Tesla Cases", page_icon="", layout="wide")
st.title("Tesla Cases \n\n **Tesla Cases Insights at Your Fingertip**")
#st.balloons()
# Pointer to the write-up on how this app was built with LlamaIndex.
st.success("""
If you'd like to learn more about the technical details of this app, check out the LlamaIndex write-up:
[How I built the Streamlit LLM application using LlamaIndex.]
""")