import os
import json
from datetime import datetime

from openai import OpenAI

from scenario_handler import ScenarioHandler

# Read the API key from the environment instead of hard-coding a secret in the source.
client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))
def chatbot_response(response, handler_type='offender', n=1):
    """Send the user's message with the scenario prompt and return n candidate replies."""
    scenario_handler = ScenarioHandler()
    # Pick the scenario prompt messages for the requested role.
    if handler_type == 'offender':
        scenario_messages = scenario_handler.handle_offender()
    else:
        scenario_messages = scenario_handler.handle_victim()

    # Build the full conversation: system prompt, scenario context, then the user's message.
    messages = [{"role": "system", "content": "You are a chatbot."}]
    messages.extend(scenario_messages)
    messages.append({"role": "user", "content": response})

    api_response = client.chat.completions.create(
        model="gpt-4",
        temperature=0.8,
        top_p=0.9,
        max_tokens=300,
        n=n,
        frequency_penalty=0.5,
        presence_penalty=0.5,
        messages=messages
    )
    # Return the first completion plus the full list of n candidates.
    choices = [choice.message.content for choice in api_response.choices]
    return choices[0], choices
def save_history(history):
    """Write the chat history to a timestamped JSON file under logs/."""
    os.makedirs('logs', exist_ok=True)
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    filename = os.path.join('logs', f'chat_history_{timestamp}.json')
    with open(filename, 'w', encoding='utf-8') as file:
        json.dump(history, file, ensure_ascii=False, indent=4)
    print(f"History saved to {filename}")
async def process_user_input(user_input, chatbot_history):
    # "종료" ("quit") ends the session: save the transcript and thank the participant.
    if user_input.strip().lower() == "종료":
        save_history(chatbot_history)
        # Farewell message: "Thank you for participating in the experiment. Please follow the follow-up instructions."
        return chatbot_history + [("종료", "실험에 참가해 주셔서 감사합니다. 후속 지시를 따라주세요")], []

    # get_both_responses is not defined in this file excerpt; it is awaited to produce the victim
    # reply, the list of victim candidates, and the offender reply for the given user input.
    victim_response, victim_choices, offender_response = await get_both_responses(user_input)
    new_history = chatbot_history + [(user_input, victim_response), (None, offender_response)]
    return new_history, victim_choices
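
# --- Usage sketch (an illustrative addition, not part of the original app) ---
# A minimal example of calling chatbot_response() and save_history() directly, e.g. as a quick
# local smoke test. It assumes scenario_handler.ScenarioHandler is importable and that the
# OPENAI_API_KEY environment variable is set; the sample prompt string is arbitrary.
if __name__ == "__main__":
    first_reply, candidates = chatbot_response("안녕하세요", handler_type="victim", n=2)
    print(f"{len(candidates)} candidate replies; first: {first_reply}")
    save_history([("안녕하세요", first_reply)])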