import streamlit as st
import requests

# Set the page configuration
st.set_page_config(page_title="RadiantScriptor")

# Function to call the Hugging Face model API
def query_huggingface_model(prompt):
    # NOTE: avoid hardcoding credentials in source; prefer st.secrets or an environment variable
    API_TOKEN = "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"
    API_URL = "https://poxj7ux0l7kszkjs.us-east-1.aws.endpoints.huggingface.cloud"

    headers = {"Authorization": f"Bearer {API_TOKEN}"}
    # Send the prompt to the endpoint; a timeout keeps the UI from hanging indefinitely
    response = requests.post(API_URL, headers=headers, json={"inputs": prompt}, timeout=60)

    if response.status_code == 200:
        return response.json()
    else:
        return {"error": response.text}


st.title("RadiantScriptor")

# User input for uploading a text file
uploaded_file = st.file_uploader("Upload a text file", type=["txt"])
user_prompt = ""

if uploaded_file is not None:
    # Read the contents of the uploaded text file
    user_prompt = uploaded_file.read().decode("utf-8")
    # Display the content to the user (optional)
    st.text_area("Uploaded Text:", value=user_prompt, height=150)

if st.button("Generate Report-Findings"):
    if not user_prompt.strip():
        # Guard against calling the API with an empty prompt
        st.warning("Please upload a text file before generating findings.")
    else:
        with st.spinner('Generating report...'):
            # Query the Hugging Face model API
            response = query_huggingface_model(user_prompt)
            if "error" in response:
                st.error(f"Error: {response['error']}")
            else:
                # Assuming the response is a list containing the generated text
                report = response[0]['generated_text']  # Adjust based on the actual response structure
                # Display the report
                st.text_area("Generated findings:", value=report, height=300)