File size: 4,823 Bytes
8304158 d424f51 8304158 a1c89a4 8304158 a1c89a4 8304158 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 |
import os
import json
import chainlit as cl
from openai import AsyncOpenAI
from config import AUDITOR_PROMPT, CRITIC_PROMPT
from dotenv import load_dotenv
# Instrument the OpenAI client
# Enables Chainlit's automatic tracing/display of OpenAI API calls in the UI.
cl.instrument_openai()
# Default chat-completion parameters. 'model' and 'temperature' may be
# overwritten at runtime by the user's choices in the on_message handler.
settings = {
    "model": "gpt-3.5-turbo",
    "temperature": 0,
    # Additional settings can be added here
}
async def initiate_analysis(file_content):
    """Run the auditor and critic passes over *file_content*, repeating while
    the user chooses to continue.

    Fixes over the previous version:
    - ``AskActionMessage.send()`` returns ``None`` on timeout; calling
      ``.get`` on it raised ``AttributeError``. A timeout is now treated
      the same as choosing "no".
    - The "continue" path recursed into this function; repeated runs grew
      the stack. It is now an explicit loop.
    """
    while True:
        # Informing the user that the auditor's analysis is starting with an appropriate emoji
        await cl.Message(content="π Starting auditor analysis...").send()
        auditor_response = await perform_auditor_analysis(file_content)
        # Formatting and sending the auditor's response with an emoji and in pretty-printed JSON format
        await cl.Message(content=f"π Auditor response:\n```json\n{format_response(auditor_response)}\n```").send()
        # Informing the user that the critic's analysis is starting with an appropriate emoji
        await cl.Message(content="π΅οΈ Starting critic analysis...").send()
        critic_response = await perform_critic_analysis(auditor_response)
        # Formatting and sending the critic's response with an emoji and in pretty-printed JSON format
        await cl.Message(content=f"π‘ Critic response:\n```json\n{format_response(critic_response)}\n```").send()
        # Ask the user if they would like to continue with emoji for options
        continue_further = await cl.AskActionMessage(
            content="Would you like to continue? π",
            actions=[
                cl.Action(name="yes", value="yes", label="β Yes, continue"),
                cl.Action(name="no", value="no", label="β No, stop"),
            ],
        ).send()
        # None means the prompt timed out — stop rather than crash on .get().
        if not continue_further or continue_further.get("value") != "yes":
            await cl.Message(content="π Thanks for using GPTLens!").send()
            return
async def perform_auditor_analysis(file_content):
    """Run the auditor LLM pass.

    Sends the uploaded file content to the chat-completions API with the
    auditor system prompt and returns the model's text reply.
    """
    completion = await client.chat.completions.create(
        messages=[
            {"role": "system", "content": AUDITOR_PROMPT},
            {"role": "user", "content": file_content},
        ],
        **settings,
    )
    return completion.choices[0].message.content
async def perform_critic_analysis(auditor_response):
    """Run the critic LLM pass.

    Feeds the auditor's output back to the chat-completions API with the
    critic system prompt and returns the model's text reply.
    """
    completion = await client.chat.completions.create(
        messages=[
            {"role": "system", "content": CRITIC_PROMPT},
            {"role": "user", "content": auditor_response},
        ],
        **settings,
    )
    return completion.choices[0].message.content
def format_response(response):
    """Pretty-print *response* as indented JSON when it parses; otherwise
    return the string untouched."""
    try:
        parsed = json.loads(response)
    except json.JSONDecodeError:
        # Not valid JSON — pass the raw text through unchanged.
        return response
    return json.dumps(parsed, indent=2)
@cl.on_chat_start
async def prestart():
    """Chainlit chat-start hook; intentionally a no-op.

    All setup (API key, model, temperature, file upload) happens in the
    on_message handler below.
    """
    pass
@cl.on_message
async def start():
    """Chainlit message handler: collect the OpenAI API key, model choice,
    temperature and a source file from the user, then kick off the analysis.

    Fixes over the previous version:
    - ``AskUserMessage.send()`` returns ``None`` on timeout; indexing it
      with ``['output']`` raised ``TypeError``. The handler now aborts with
      a message instead.
    - Dropped the redundant ``if files:`` check — the ``while`` loop only
      exits once ``files`` is non-empty.
    """
    api_key_message = await cl.AskUserMessage(content="π Please enter your OpenAI API key:").send()
    if not api_key_message:
        # Prompt timed out — we cannot build a client without a key.
        await cl.Message(content="No API key received. Send any message to try again.").send()
        return
    await cl.Message(content="β API key used only for this session, don't worry!").send()
    # The client is shared with the perform_* helpers via a module global.
    global client
    client = AsyncOpenAI(api_key=api_key_message['output'])
    model_type = await cl.AskActionMessage(
        content="Pick a model!",
        actions=[
            cl.Action(name="gpt3", value="gpt3", label="GPT-3.5 Turbo"),
            cl.Action(name="gpt4", value="gpt4", label="GPT-4"),
            cl.Action(name="gpt4turbo", value="gpt4turbo", label="GPT-4 Turbo Preview"),
        ],
    ).send()
    if model_type:
        # Map the action value to the actual model name; keep the current
        # default on an unrecognized value.
        settings['model'] = {
            "gpt3": "gpt-3.5-turbo",
            "gpt4": "gpt-4",
            "gpt4turbo": "gpt-4-turbo-preview"
        }.get(model_type.get("value"), settings['model'])
    temperature = await cl.AskUserMessage(content="Give the temperature value (between 0 and 1):").send()
    if temperature:
        try:
            temperature_value = float(temperature['output'])
            # Out-of-range values silently keep the default temperature.
            if 0 <= temperature_value <= 1:
                settings["temperature"] = temperature_value
        except ValueError:
            await cl.Message(content="Invalid temperature value provided. Using default.").send()
    # Keep prompting until the user actually uploads a file.
    files = []
    while not files:
        files = await cl.AskFileMessage(
            content="Please upload a text file (.txt) or a Solidity file (.sol) to begin!",
            accept={"text/plain": [".sol", ".txt"]}
        ).send()
    await cl.Message(content="π Setup complete! Ready to start analysis.").send()
    text_file = files[0]
    with open(text_file.path, "r", encoding="utf-8") as f:
        file_content = f.read()
    # Initiate the analysis with the uploaded file content
    await initiate_analysis(file_content)
|