diff --git a/Dockerfile b/Dockerfile index 68c2e1aad63ac50c36ab8109b808c982e55935c2..98b6a4d57d7365f8e5a36818a21aa5583858dee4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,21 +1,13 @@ -FROM openjdk:17-slim +FROM python:3.11-slim -RUN apt-get update && apt-get install -y python3 python3-pip +RUN useradd -m -u 1000 user +USER user +ENV PATH="/home/user/.local/bin:$PATH" WORKDIR /app +COPY --chown=user ./requirements.txt requirements.txt +RUN pip install --no-cache-dir --upgrade -r requirements.txt -COPY . . - -RUN pip3 install -r requirements.txt - - -# Expose port -EXPOSE 7860 - -# Health check -HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \ - CMD curl -f http://localhost:7860/health || exit 1 - -# Start the application -CMD ["python3", "-m", "uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"] \ No newline at end of file +COPY --chown=user . /app +CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/app.py b/app.py index 149bc93a688acae6acf5151483e7e01ba838ac6b..2f4dfea43672a8ac321d9358e220dd41d88bd6de 100644 --- a/app.py +++ b/app.py @@ -1,1046 +1,12 @@ -import os -import tempfile -import shutil -import subprocess -import asyncio -from pathlib import Path -from typing import List, Optional, Dict, Any -import uuid -import re -import time -import hashlib -from fastapi import ( - UploadFile, - File, - Form, - HTTPException, - BackgroundTasks, - APIRouter, - FastAPI, -) -from fastapi.responses import JSONResponse -from pydantic import BaseModel -import json -import resource -import platform -from loguru import logger - -app = FastAPI(title="Code Grader API", version="1.0.0", docs_url="/") - -# Configuration -MAX_EXECUTION_TIME = 5 # seconds -MAX_MEMORY = 256 * 1024 * 1024 # 256MB -MAX_FILE_SIZE = 10 * 1024 * 1024 # 10MB -MAX_TOTAL_SIZE = 2 * 1024 * 1024 # 2MB total size limit for uploads and repos - - -class ExecutionResult(BaseModel): - success: bool - stdout: str - stderr: str - execution_time: float - exit_code: int - error: Optional[str] = None - - -class InputPattern(BaseModel): - type: str # "input", "scanf", "cin", etc. - line_number: int - variable_name: Optional[str] = None - prompt_message: Optional[str] = None - data_type: Optional[str] = None # "int", "str", "float", etc. 
- raw_code: str - - -class InputAnalysisResult(BaseModel): - language: str - total_inputs: int - input_patterns: List[InputPattern] - suggestions: List[str] # UI suggestions for user - - -class CodeExecutor: - def __init__(self): - self.compilers = { - "c": "clang" if platform.system() == "Darwin" else "gcc", - "cpp": "clang++" if platform.system() == "Darwin" else "g++", - "java": "javac", - "python": "python3", - } - - def set_resource_limits(self): - """Set resource limits for subprocess (Unix only)""" - if platform.system() == "Linux": - resource.setrlimit( - resource.RLIMIT_CPU, (MAX_EXECUTION_TIME, MAX_EXECUTION_TIME) - ) - resource.setrlimit(resource.RLIMIT_AS, (MAX_MEMORY, MAX_MEMORY)) - - def analyze_input_patterns( - self, language: str, file_contents: Dict[str, str] - ) -> InputAnalysisResult: - """Analyze code files to detect input patterns""" - patterns = [] - - if language == "python": - patterns = self._analyze_python_inputs(file_contents) - elif language == "java": - patterns = self._analyze_java_inputs(file_contents) - elif language in ["c", "cpp"]: - patterns = self._analyze_c_cpp_inputs(file_contents) - - suggestions = self._generate_input_suggestions(patterns, language) - - return InputAnalysisResult( - language=language, - total_inputs=len(patterns), - input_patterns=patterns, - suggestions=suggestions, - ) - - def _analyze_python_inputs( - self, file_contents: Dict[str, str] - ) -> List[InputPattern]: - """Analyze Python files for input() patterns""" - patterns = [] - - for filename, content in file_contents.items(): - lines = content.split("\n") - - for i, line in enumerate(lines, 1): - # Pattern 1: variable = input("prompt") - match = re.search( - r'(\w+)\s*=\s*input\s*\(\s*["\']([^"\']*)["\']?\s*\)', line - ) - if match: - var_name, prompt = match.groups() - patterns.append( - InputPattern( - type="input", - line_number=i, - variable_name=var_name, - prompt_message=prompt or f"Enter value for {var_name}", - data_type="str", - raw_code=line.strip(), - ) - ) - continue - - # Pattern 2: variable = int(input("prompt")) - match = re.search( - r'(\w+)\s*=\s*(int|float|str)\s*\(\s*input\s*\(\s*["\']([^"\']*)["\']?\s*\)\s*\)', - line, - ) - if match: - var_name, data_type, prompt = match.groups() - patterns.append( - InputPattern( - type="input", - line_number=i, - variable_name=var_name, - prompt_message=prompt - or f"Enter {data_type} value for {var_name}", - data_type=data_type, - raw_code=line.strip(), - ) - ) - continue - - # Pattern 3: Simple input() without assignment - if "input(" in line and "=" not in line: - patterns.append( - InputPattern( - type="input", - line_number=i, - variable_name=None, - prompt_message="Enter input", - data_type="str", - raw_code=line.strip(), - ) - ) - - return patterns - - def _analyze_java_inputs(self, file_contents: Dict[str, str]) -> List[InputPattern]: - """Analyze Java files for Scanner input patterns""" - patterns = [] - - for filename, content in file_contents.items(): - lines = content.split("\n") - - for i, line in enumerate(lines, 1): - # Pattern 1: scanner.nextInt(), scanner.nextLine(), etc. 
- match = re.search(r"(\w+)\s*=\s*(\w+)\.next(\w+)\s*\(\s*\)", line) - if match: - var_name, scanner_name, method = match.groups() - data_type = self._java_method_to_type(method) - patterns.append( - InputPattern( - type="scanner", - line_number=i, - variable_name=var_name, - prompt_message=f"Enter {data_type} value for {var_name}", - data_type=data_type, - raw_code=line.strip(), - ) - ) - continue - - # Pattern 2: Direct scanner calls without assignment - match = re.search(r"(\w+)\.next(\w+)\s*\(\s*\)", line) - if match and "=" not in line: - scanner_name, method = match.groups() - data_type = self._java_method_to_type(method) - patterns.append( - InputPattern( - type="scanner", - line_number=i, - variable_name=None, - prompt_message=f"Enter {data_type} input", - data_type=data_type, - raw_code=line.strip(), - ) - ) - - return patterns - - def _analyze_c_cpp_inputs( - self, file_contents: Dict[str, str] - ) -> List[InputPattern]: - """Analyze C/C++ files for input patterns""" - patterns = [] - - for filename, content in file_contents.items(): - lines = content.split("\n") - - for i, line in enumerate(lines, 1): - # Pattern 1: scanf("%d", &variable) - match = re.search( - r'scanf\s*\(\s*["\']([^"\']*)["\'],\s*&(\w+)\s*\)', line - ) - if match: - format_spec, var_name = match.groups() - data_type = self._c_format_to_type(format_spec) - patterns.append( - InputPattern( - type="scanf", - line_number=i, - variable_name=var_name, - prompt_message=f"Enter {data_type} value for {var_name}", - data_type=data_type, - raw_code=line.strip(), - ) - ) - continue - - # Pattern 2: cin >> variable (C++) - match = re.search(r"cin\s*>>\s*(\w+)", line) - if match: - var_name = match.group(1) - patterns.append( - InputPattern( - type="cin", - line_number=i, - variable_name=var_name, - prompt_message=f"Enter value for {var_name}", - data_type="unknown", - raw_code=line.strip(), - ) - ) - continue - - # Pattern 3: getline(cin, variable) for strings - match = re.search(r"getline\s*\(\s*cin\s*,\s*(\w+)\s*\)", line) - if match: - var_name = match.group(1) - patterns.append( - InputPattern( - type="getline", - line_number=i, - variable_name=var_name, - prompt_message=f"Enter string for {var_name}", - data_type="string", - raw_code=line.strip(), - ) - ) - - return patterns - - def _java_method_to_type(self, method: str) -> str: - """Convert Java Scanner method to data type""" - type_mapping = { - "Int": "int", - "Double": "double", - "Float": "float", - "Long": "long", - "Line": "string", - "": "string", - } - return type_mapping.get(method, "string") - - def _c_format_to_type(self, format_spec: str) -> str: - """Convert C format specifier to data type""" - if "%d" in format_spec or "%i" in format_spec: - return "int" - elif "%f" in format_spec: - return "float" - elif "%lf" in format_spec: - return "double" - elif "%c" in format_spec: - return "char" - elif "%s" in format_spec: - return "string" - return "unknown" - - def _generate_input_suggestions( - self, patterns: List[InputPattern], language: str - ) -> List[str]: - """Generate UI suggestions based on detected patterns""" - suggestions = [] - - if not patterns: - suggestions.append( - "No input patterns detected. Code will run without user input." - ) - return suggestions - - suggestions.append( - f"Detected {len(patterns)} input requirement(s) in {language} code:" - ) - - for i, pattern in enumerate(patterns, 1): - if pattern.variable_name: - suggestions.append( - f"{i}. 
Line {pattern.line_number}: {pattern.prompt_message} " - f"(Variable: {pattern.variable_name}, Type: {pattern.data_type})" - ) - else: - suggestions.append( - f"{i}. Line {pattern.line_number}: {pattern.prompt_message} " - f"(Type: {pattern.data_type})" - ) - - suggestions.append( - "Please provide input values in the order they appear in the code." - ) - - return suggestions - - async def execute_code( - self, - language: str, - main_files: List[str], - workspace: str, - input_data: Optional[List[str]] = None, - ) -> ExecutionResult: - """Execute code based on language with optional input data""" - try: - if language == "python": - return await self._execute_python(main_files, workspace, input_data) - elif language == "java": - return await self._execute_java(main_files, workspace, input_data) - elif language in ["c", "cpp"]: - return await self._execute_c_cpp( - main_files, workspace, language, input_data - ) - else: - raise ValueError(f"Unsupported language: {language}") - except Exception as e: - return ExecutionResult( - success=False, - stdout="", - stderr=str(e), - execution_time=0, - exit_code=-1, - error=str(e), - ) - - async def _execute_with_input( - self, command: List[str], workspace: str, input_data: Optional[List[str]] = None - ) -> tuple: - """Execute process with input data""" - process = await asyncio.create_subprocess_exec( - *command, - cwd=workspace, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - stdin=asyncio.subprocess.PIPE, - preexec_fn=( - self.set_resource_limits if platform.system() == "Linux" else None - ), - ) - - # Prepare input string - stdin_input = None - if input_data: - stdin_input = "\n".join(input_data) + "\n" - stdin_input = stdin_input.encode("utf-8") - - try: - stdout, stderr = await asyncio.wait_for( - process.communicate(input=stdin_input), timeout=MAX_EXECUTION_TIME - ) - return stdout, stderr, process.returncode - except asyncio.TimeoutError: - process.kill() - await process.wait() - raise asyncio.TimeoutError() - - async def _execute_python( - self, - main_files: List[str], - workspace: str, - input_data: Optional[List[str]] = None, - ) -> ExecutionResult: - """Execute Python code with input support""" - results = [] - - for main_file in main_files: - file_path = os.path.join(workspace, main_file) - if not os.path.exists(file_path): - results.append( - ExecutionResult( - success=False, - stdout="", - stderr=f"File not found: {main_file}", - execution_time=0, - exit_code=-1, - ) - ) - continue - - try: - start_time = asyncio.get_event_loop().time() - - stdout, stderr, returncode = await self._execute_with_input( - ["python3", main_file], workspace, input_data - ) - - execution_time = asyncio.get_event_loop().time() - start_time - - results.append( - ExecutionResult( - success=returncode == 0, - stdout=stdout.decode("utf-8", errors="replace"), - stderr=stderr.decode("utf-8", errors="replace"), - execution_time=execution_time, - exit_code=returncode, - ) - ) - - except asyncio.TimeoutError: - results.append( - ExecutionResult( - success=False, - stdout="", - stderr="Execution timeout exceeded", - execution_time=MAX_EXECUTION_TIME, - exit_code=-1, - ) - ) - except Exception as e: - results.append( - ExecutionResult( - success=False, - stdout="", - stderr=str(e), - execution_time=0, - exit_code=-1, - error=str(e), - ) - ) - - return self._combine_results(results, main_files) - - async def _execute_java( - self, - main_files: List[str], - workspace: str, - input_data: Optional[List[str]] = None, - ) -> ExecutionResult: - 
"""Compile and execute Java code with input support""" - - # Check if we have .java files to compile - java_files = list(Path(workspace).glob("*.java")) - needs_compilation = len(java_files) > 0 - - # If we have .java files, compile them - if needs_compilation: - logger.info(f"Found {len(java_files)} Java source files, compiling...") - - compile_process = await asyncio.create_subprocess_exec( - "javac", - *[str(f) for f in java_files], - cwd=workspace, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - ) - - stdout, stderr = await compile_process.communicate() - - if compile_process.returncode != 0: - return ExecutionResult( - success=False, - stdout="", - stderr=f"Compilation failed:\n{stderr.decode('utf-8', errors='replace')}", - execution_time=0, - exit_code=compile_process.returncode, - ) - - logger.info("Java compilation successful") - else: - # Check if we have .class files for the main files - class_files_missing = [] - for main_file in main_files: - if main_file.endswith(".class"): - class_file_path = os.path.join(workspace, main_file) - else: - class_file_path = os.path.join(workspace, f"{main_file}.class") - - if not os.path.exists(class_file_path): - class_files_missing.append(main_file) - - if class_files_missing: - return ExecutionResult( - success=False, - stdout="", - stderr=f"No Java source files found and missing .class files for: {', '.join(class_files_missing)}", - execution_time=0, - exit_code=-1, - ) - - logger.info("Using existing .class files, skipping compilation") - - # Execute main files - results = [] - for main_file in main_files: - # Determine class name - if main_file.endswith(".class"): - class_name = main_file.replace(".class", "") - elif main_file.endswith(".java"): - class_name = main_file.replace(".java", "") - else: - class_name = main_file - - # Verify the .class file exists - class_file_path = os.path.join(workspace, f"{class_name}.class") - if not os.path.exists(class_file_path): - results.append( - ExecutionResult( - success=False, - stdout="", - stderr=f"Class file not found: {class_name}.class", - execution_time=0, - exit_code=-1, - ) - ) - continue - - try: - start_time = asyncio.get_event_loop().time() - - stdout, stderr, returncode = await self._execute_with_input( - ["java", class_name], workspace, input_data - ) - - execution_time = asyncio.get_event_loop().time() - start_time - - results.append( - ExecutionResult( - success=returncode == 0, - stdout=stdout.decode("utf-8", errors="replace"), - stderr=stderr.decode("utf-8", errors="replace"), - execution_time=execution_time, - exit_code=returncode, - ) - ) - - except asyncio.TimeoutError: - results.append( - ExecutionResult( - success=False, - stdout="", - stderr="Execution timeout exceeded", - execution_time=MAX_EXECUTION_TIME, - exit_code=-1, - ) - ) - except Exception as e: - results.append( - ExecutionResult( - success=False, - stdout="", - stderr=str(e), - execution_time=0, - exit_code=-1, - error=str(e), - ) - ) - - return self._combine_results(results, main_files) - - async def _execute_c_cpp( - self, - main_files: List[str], - workspace: str, - language: str, - input_data: Optional[List[str]] = None, - ) -> ExecutionResult: - """Compile and execute C/C++ code with input support""" - compiler = self.compilers[language] - results = [] - - for main_file in main_files: - file_path = os.path.join(workspace, main_file) - if not os.path.exists(file_path): - results.append( - ExecutionResult( - success=False, - stdout="", - stderr=f"File not found: {main_file}", - 
execution_time=0, - exit_code=-1, - ) - ) - continue - - # Output binary name - output_name = main_file.replace(".c", "").replace(".cpp", "") - - # Compile - compile_process = await asyncio.create_subprocess_exec( - compiler, - main_file, - "-o", - output_name, - cwd=workspace, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - ) - - stdout, stderr = await compile_process.communicate() - - if compile_process.returncode != 0: - results.append( - ExecutionResult( - success=False, - stdout="", - stderr=f"Compilation failed:\n{stderr.decode('utf-8', errors='replace')}", - execution_time=0, - exit_code=compile_process.returncode, - ) - ) - continue - - # Execute - try: - start_time = asyncio.get_event_loop().time() - - stdout, stderr, returncode = await self._execute_with_input( - [f"./{output_name}"], workspace, input_data - ) - - execution_time = asyncio.get_event_loop().time() - start_time - - results.append( - ExecutionResult( - success=returncode == 0, - stdout=stdout.decode("utf-8", errors="replace"), - stderr=stderr.decode("utf-8", errors="replace"), - execution_time=execution_time, - exit_code=returncode, - ) - ) - - except asyncio.TimeoutError: - results.append( - ExecutionResult( - success=False, - stdout="", - stderr="Execution timeout exceeded", - execution_time=MAX_EXECUTION_TIME, - exit_code=-1, - ) - ) - except Exception as e: - results.append( - ExecutionResult( - success=False, - stdout="", - stderr=str(e), - execution_time=0, - exit_code=-1, - error=str(e), - ) - ) - - return self._combine_results(results, main_files) - - def _combine_results( - self, results: List[ExecutionResult], main_files: List[str] - ) -> ExecutionResult: - """Combine multiple execution results""" - if len(results) == 1: - return results[0] - else: - combined_stdout = "\n".join( - [f"=== {main_files[i]} ===\n{r.stdout}" for i, r in enumerate(results)] - ) - combined_stderr = "\n".join( - [ - f"=== {main_files[i]} ===\n{r.stderr}" - for i, r in enumerate(results) - if r.stderr - ] - ) - total_time = sum(r.execution_time for r in results) - all_success = all(r.success for r in results) - - return ExecutionResult( - success=all_success, - stdout=combined_stdout, - stderr=combined_stderr, - execution_time=total_time, - exit_code=0 if all_success else -1, - ) - - -def get_directory_size(directory_path: str) -> int: - """Calculate total size of directory in bytes""" - total_size = 0 - try: - for dirpath, dirnames, filenames in os.walk(directory_path): - for filename in filenames: - filepath = os.path.join(dirpath, filename) - if os.path.exists(filepath): - total_size += os.path.getsize(filepath) - except Exception as e: - print(f"Error calculating directory size: {e}") - return float("inf") - return total_size - - -def validate_upload_size(files: List[UploadFile]) -> tuple[bool, int]: - """Validate total size of uploaded files""" - total_size = 0 - for file in files: - if hasattr(file, "size") and file.size: - total_size += file.size - else: - return True, 0 - return total_size <= MAX_TOTAL_SIZE, total_size - - -# Create executor instance -executor = CodeExecutor() - - -async def clone_repo(repo_url: str, workspace: str): - """Clone a git repository into the workspace.""" - try: - process = await asyncio.create_subprocess_exec( - "git", - "clone", - repo_url, - ".", - cwd=workspace, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - ) - stdout, stderr = await process.communicate() - if process.returncode != 0: - raise HTTPException( - status_code=400, detail=f"Failed to 
clone repository: {stderr.decode()}" - ) - - repo_size = get_directory_size(workspace) - if repo_size > MAX_TOTAL_SIZE: - raise HTTPException( - status_code=400, - detail=f"Repository size ({repo_size / 1024 / 1024:.2f}MB) exceeds limit ({MAX_TOTAL_SIZE / 1024 / 1024:.2f}MB)", - ) - - except HTTPException: - raise - except Exception as e: - raise HTTPException( - status_code=500, detail=f"Error cloning repository: {str(e)}" - ) - - -def detect_language_from_files(main_files: List[str]) -> str: - """Detect programming language from file extensions""" - if not main_files: - raise HTTPException(status_code=400, detail="No main files provided") - - first_file = main_files[0] - if "." not in first_file: - java_extensions = [".java", ".class"] - for file in main_files: - if any(file.endswith(ext) for ext in java_extensions): - return "java" - - raise HTTPException( - status_code=400, - detail=f"Cannot detect language: file '{first_file}' has no extension", - ) - - extension = first_file.split(".")[-1].lower() - - extension_to_language = { - "py": "python", - "java": "java", - "class": "java", - "c": "c", - "cpp": "cpp", - "cc": "cpp", - "cxx": "cpp", - "c++": "cpp", - } - - if extension not in extension_to_language: - supported_extensions = ", ".join(extension_to_language.keys()) - raise HTTPException( - status_code=400, - detail=f"Unsupported file extension '.{extension}'. Supported extensions: {supported_extensions}", - ) - - detected_language = extension_to_language[extension] - - for file in main_files: - if "." in file: - file_ext = file.split(".")[-1].lower() - file_language = extension_to_language.get(file_ext) - if file_language != detected_language: - raise HTTPException( - status_code=400, - detail=f"Mixed languages detected: '{first_file}' ({detected_language}) and '{file}' ({file_language})", - ) - - return detected_language - - -@app.post("/analyze-inputs") -async def analyze_inputs(code_content: str = Form(...), language: str = Form(...)): - """ - Analyze code content to detect input patterns with caching - - Simple API that takes code content and language, returns input patterns - - Args: - code_content: The source code content to analyze - language: Programming language (python, java, c, cpp) - - Returns: - InputAnalysisResult with detected input patterns - """ - try: - # Validate language - supported_languages = ["python", "java", "c", "cpp"] - if language.lower() not in supported_languages: - raise HTTPException( - status_code=400, - detail=f"Unsupported language: {language}. 
Supported: {supported_languages}", - ) - - # Create a simple file contents dict for analysis - file_contents_dict = {"main": code_content} - - analysis_result = executor.analyze_input_patterns( - language.lower(), file_contents_dict - ) - result_dict = analysis_result.model_dump() - - # Cache the result - - logger.info(f"Input analysis completed for {language} code") - return JSONResponse(content=result_dict) - - except HTTPException: - raise - except Exception as e: - logger.error(f"Error analyzing inputs: {str(e)}") - raise HTTPException(status_code=500, detail=f"Analysis failed: {str(e)}") - - -@app.post("/judge") -async def judge_code( - main_files: str = Form(...), - files: List[UploadFile] = File(None), - repo_url: Optional[str] = Form(None), - input_data: Optional[str] = Form(None), # JSON array of input strings -): - """ - Judge code submission with optional input data - - - main_files: JSON array of main files to execute - - files: Multiple files maintaining folder structure - - repo_url: Git repository URL (alternative to files) - - input_data: JSON array of input strings for programs that require user input - """ - # Parse main_files - try: - main_files_list = json.loads(main_files) - except json.JSONDecodeError: - raise HTTPException(status_code=400, detail="Invalid main_files format") - - # Parse input_data if provided - input_list = None - logger.info(f"Received input_data: {input_data}") - if input_data: - try: - input_list = json.loads(input_data) - if not isinstance(input_list, list): - raise ValueError("Input data must be an array") - except (json.JSONDecodeError, ValueError): - raise HTTPException( - status_code=400, detail="Invalid input_data format - must be JSON array" - ) - - # Auto-detect language from file extensions - language = detect_language_from_files(main_files_list) - - # Validate input: either files or repo_url must be provided - if not files and not repo_url: - raise HTTPException( - status_code=400, detail="Either files or repo_url must be provided" - ) - if files and repo_url: - raise HTTPException( - status_code=400, detail="Provide either files or repo_url, not both" - ) - - # Create temporary workspace - workspace = None - try: - # Create unique temporary directory - workspace = tempfile.mkdtemp(prefix=f"judge_{uuid.uuid4().hex}_") - - if repo_url: - # Clone repository - await clone_repo(repo_url, workspace) - else: - # Validate total upload size - total_upload_size = 0 - file_contents = [] - - # Pre-read all files to check total size - for file in files: - content = await file.read() - if len(content) > MAX_FILE_SIZE: - raise HTTPException( - status_code=400, - detail=f"File {file.filename} exceeds individual size limit", - ) - - total_upload_size += len(content) - file_contents.append((file.filename, content)) - - # Check total size limit - if total_upload_size > MAX_TOTAL_SIZE: - raise HTTPException( - status_code=400, - detail=f"Total upload size ({total_upload_size / 1024 / 1024:.2f}MB) exceeds limit ({MAX_TOTAL_SIZE / 1024 / 1024:.2f}MB)", - ) - - # Save uploaded files maintaining structure - for filename, content in file_contents: - # Create file path - file_path = os.path.join(workspace, filename) - - # Create directories if needed - os.makedirs(os.path.dirname(file_path), exist_ok=True) - - # Write file - with open(file_path, "wb") as f: - f.write(content) - - # Execute code with input data - result = await executor.execute_code( - language, main_files_list, workspace, input_list - ) - logger.info(f"Execution result: {result}") - return 
JSONResponse(content=result.model_dump()) - - except Exception as e: - raise HTTPException(status_code=500, detail=str(e)) - - finally: - # Clean up temporary files - if workspace and os.path.exists(workspace): - try: - shutil.rmtree(workspace) - except Exception as e: - print(f"Error cleaning up workspace: {e}") - - -@app.get("/languages") -async def get_supported_languages(): - """Get supported programming languages with auto-detection info""" - return { - "languages": [ - {"id": "python", "name": "Python 3", "extensions": [".py"]}, - {"id": "java", "name": "Java", "extensions": [".java", ".class"]}, - {"id": "c", "name": "C", "extensions": [".c"]}, - {"id": "cpp", "name": "C++", "extensions": [".cpp", ".cc", ".cxx", ".c++"]}, - ], - "note": "Language is automatically detected from file extensions. For Java, both source (.java) and compiled (.class) files are supported.", - "input_support": "All languages support automatic input detection and handling for interactive programs.", - } - - -# Example usage endpoints for testing -@app.get("/examples/input-patterns") -async def get_input_pattern_examples(): - """Get examples of supported input patterns for each language""" - return { - "python": [ - 'name = input("Enter your name: ")', - 'age = int(input("Enter your age: "))', - 'score = float(input("Enter score: "))', - "input() # Simple input without assignment", - ], - "java": [ - "String name = scanner.nextLine();", - "int age = scanner.nextInt();", - "double score = scanner.nextDouble();", - "scanner.next(); # Direct call", - ], - "c": ['scanf("%s", name);', 'scanf("%d", &age);', 'scanf("%f", &score);'], - "cpp": ["cin >> name;", "cin >> age;", "getline(cin, fullName);"], - } - - -@app.post("/test-input-analysis") -async def test_input_analysis(): - """Test endpoint with sample code for input analysis""" - - # Sample Python code with inputs - sample_code = { - "main.py": """ -name = input("Enter your name: ") -age = int(input("Enter your age: ")) -score = float(input("Enter your score: ")) - -print(f"Hello {name}") -print(f"You are {age} years old") -print(f"Your score is {score}") - -# Simple input without assignment -input("Press enter to continue...") -""" - } - - # Test the analysis - analysis_result = executor.analyze_input_patterns("python", sample_code) - - return { - "sample_code": sample_code, - "analysis": analysis_result.model_dump(), - "suggested_inputs": [ - "John Doe", # for name - "25", # for age - "95.5", # for score - "", # for press enter - ], - } +from dotenv import load_dotenv + +load_dotenv() +from src.apis.create_app import create_app, api_router +import uvicorn + + +app = create_app() + +app.include_router(api_router) +if __name__ == "__main__": + uvicorn.run("app:app", host="0.0.0.0", port=8000, reload=False) diff --git a/requirements.txt b/requirements.txt index 1af8822c2cc453dd4c87598d59fa2011adac7288..91cee1573582b1d9f6b652b685cf4c96ab7fed3d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,14 @@ -fastapi>=0.104.0 -uvicorn[standard]>=0.24.0 -python-multipart>=0.0.6 -pydantic>=2.4.0 -loguru>=0.7.0 -aiofiles>=23.0.0 -httpx>=0.25.0 \ No newline at end of file +fastapi +uvicorn +python-dateutil +pandas +openpyxl +redis +bs4 +pytz +langgraph +langchain +langgraph-swarm +langchain-google-genai +python-dotenv +loguru \ No newline at end of file diff --git a/src/.DS_Store b/src/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..032054da8230c5f808bb27913dfedfa0a06ed03a Binary files /dev/null and b/src/.DS_Store differ diff --git 
a/src/Untitled.ipynb b/src/Untitled.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..d5fd680106ddc7a89e34390f7faf1f743f66c49f --- /dev/null +++ b/src/Untitled.ipynb @@ -0,0 +1,51 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "id": "00ef7a54-9c8a-4235-82d1-9df1ac5f2967", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "hello\n" + ] + } + ], + "source": [ + "print('hello')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "d2e19d62-d6ba-4cb7-ac8c-b965cf65b2d7", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.9" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/src/__pycache__/__init__.cpython-311.pyc b/src/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1f411ae2a7fb755dacd11e2b07871897118ad82d Binary files /dev/null and b/src/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/__pycache__/app.cpython-311.pyc b/src/__pycache__/app.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a029f67b6b58d5bdb814e8dd5bd31e097c8a3779 Binary files /dev/null and b/src/__pycache__/app.cpython-311.pyc differ diff --git a/src/__pycache__/state.cpython-311.pyc b/src/__pycache__/state.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9e8755ec932c515b48a691bc66b29b50ed5cd650 Binary files /dev/null and b/src/__pycache__/state.cpython-311.pyc differ diff --git a/src/agents/.DS_Store b/src/agents/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..71948439baf336cc7cbe5c576d3fd3dedc5acad9 Binary files /dev/null and b/src/agents/.DS_Store differ diff --git a/src/agents/base/flow.py b/src/agents/base/flow.py new file mode 100644 index 0000000000000000000000000000000000000000..4c0e01b72d728ac0dccafda1ab4d4b20906ae76e --- /dev/null +++ b/src/agents/base/flow.py @@ -0,0 +1,22 @@ +from langgraph.graph import StateGraph, START, END +from .func import State +from langgraph.graph.state import CompiledStateGraph +from langgraph.store.memory import InMemoryStore +class PrimaryChatBot: + def __init__(self): + self.builder = StateGraph(State) + + @staticmethod + def routing(state: State): + pass + + def node(self): + pass + + def edge(self): + pass + + def __call__(self) -> CompiledStateGraph: + self.node() + self.edge() + return self.builder.compile(checkpointer=InMemoryStore()) diff --git a/src/agents/base/func.py b/src/agents/base/func.py new file mode 100644 index 0000000000000000000000000000000000000000..c1a8c978737a1c7fb2919b7c3fd9581a1234d5fe --- /dev/null +++ b/src/agents/base/func.py @@ -0,0 +1,4 @@ +from typing import TypedDict + +class State(TypedDict): + pass \ No newline at end of file diff --git a/src/agents/base/prompt.py b/src/agents/base/prompt.py new file mode 100644 index 0000000000000000000000000000000000000000..2cee8d3e00570f5d9fdbfc7263f119fc1bcea0c4 --- /dev/null +++ b/src/agents/base/prompt.py @@ -0,0 +1,14 @@ +from langchain_core.prompts import ChatPromptTemplate + + +base_prompt = ChatPromptTemplate.from_messages( + [ + ( + 
"system", + """Vai trò + +""", + ), + ("placeholder", "{messages}"), + ] +) diff --git a/src/agents/base/tools.py b/src/agents/base/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..0e55fd962802611e69b1563885e65e244e30040f --- /dev/null +++ b/src/agents/base/tools.py @@ -0,0 +1,16 @@ +from langchain_core.tools import tool +from loguru import logger + + +@tool +def function_name( + input: str, +) -> str: + """ + Mô tả chức năng của hàm này. + """ + logger.info(f"Received input: {input}") + # Thực hiện các thao tác cần thiết với input + result = f"Processed: {input}" + logger.info(f"Returning result: {result}") + return result diff --git a/src/agents/role_play/__pycache__/func.cpython-311.pyc b/src/agents/role_play/__pycache__/func.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ebb136b0e31d9db54e018ce1df70a8c3b72dd44d Binary files /dev/null and b/src/agents/role_play/__pycache__/func.cpython-311.pyc differ diff --git a/src/agents/role_play/__pycache__/prompt.cpython-311.pyc b/src/agents/role_play/__pycache__/prompt.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..2e112ea3ad3ba8629f1bae72fb46603eec374cdf Binary files /dev/null and b/src/agents/role_play/__pycache__/prompt.cpython-311.pyc differ diff --git a/src/agents/role_play/__pycache__/scenarios.cpython-311.pyc b/src/agents/role_play/__pycache__/scenarios.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..4dc006fd79b891ef8986c977f4290e6b763a38b1 Binary files /dev/null and b/src/agents/role_play/__pycache__/scenarios.cpython-311.pyc differ diff --git a/src/agents/role_play/flow.py b/src/agents/role_play/flow.py new file mode 100644 index 0000000000000000000000000000000000000000..53b4cdac495c6dfa29fb758906b4b6c3ab2d0628 --- /dev/null +++ b/src/agents/role_play/flow.py @@ -0,0 +1,24 @@ +from langgraph.graph import StateGraph, START, END +from .func import State +from langgraph.graph.state import CompiledStateGraph +from langgraph.store.memory import InMemoryStore + + +class PrimaryChatBot: + def __init__(self): + self.builder = StateGraph(State) + + @staticmethod + def routing(state: State): + pass + + def node(self): + pass + + def edge(self): + pass + + def __call__(self) -> CompiledStateGraph: + self.node() + self.edge() + return self.builder.compile(checkpointer=InMemoryStore()) diff --git a/src/agents/role_play/func.py b/src/agents/role_play/func.py new file mode 100644 index 0000000000000000000000000000000000000000..f00d23ec61a5c83096f4bcdd76ec4dd235209574 --- /dev/null +++ b/src/agents/role_play/func.py @@ -0,0 +1,55 @@ +from typing import TypedDict +from src.config.llm import model +from langgraph.checkpoint.memory import InMemorySaver +from langgraph.prebuilt import create_react_agent +from langgraph_swarm import create_handoff_tool, create_swarm +from .prompt import roleplay_prompt, guiding_prompt + + +class State(TypedDict): + pass + + +def create_agents(scenario, checkpointer=InMemorySaver()): + + roleplay_agent = create_react_agent( + model, + [ + create_handoff_tool( + agent_name="Guiding Agent", + description="Hand off to Guiding Agent when user shows signs of needing help, guidance, or struggles with communication", + ), + ], + prompt=roleplay_prompt.format( + scenario_title=scenario["scenario_title"], + scenario_description=scenario["scenario_description"], + scenario_context=scenario["scenario_context"], + your_role=scenario["your_role"], + key_vocabulary=scenario["key_vocabulary"], + ), + 
name="Roleplay Agent", + ) + + guiding_agent = create_react_agent( + model, + [ + create_handoff_tool( + agent_name="Roleplay Agent", + description="Hand off back to Roleplay Agent when user is ready for scenario practice and shows improved confidence", + ), + ], + prompt=guiding_prompt.format( + scenario_title=scenario["scenario_title"], + scenario_description=scenario["scenario_description"], + scenario_context=scenario["scenario_context"], + your_role=scenario["your_role"], + key_vocabulary=scenario["key_vocabulary"], + ), + name="Guiding Agent", + ) + + workflow = create_swarm( + [roleplay_agent, guiding_agent], default_active_agent="Roleplay Agent" + ) + + return workflow.compile(checkpointer) diff --git a/src/agents/role_play/prompt.py b/src/agents/role_play/prompt.py new file mode 100644 index 0000000000000000000000000000000000000000..38486890add39062d85578393f9892458dbdfab6 --- /dev/null +++ b/src/agents/role_play/prompt.py @@ -0,0 +1,187 @@ +from langchain_core.prompts import ChatPromptTemplate + +roleplay_prompt = """# Role: Roleplay Partner - Part of the Wise Tutor System + +You are part of Wise Tutor, a friendly English learning companion. You're the roleplay specialist who creates natural, authentic conversations with learners. + +## About Wise Tutor: +If asked about your name or identity, respond warmly: "I'm part of Wise Tutor! I'm here to help you practice English through fun conversations. Think of me as your friendly conversation partner!" + +## CRITICAL LANGUAGE RULE: +**IF USER SPEAKS ANY LANGUAGE OTHER THAN ENGLISH → IMMEDIATELY HAND OFF TO GUIDING AGENT** +This is an English learning experience. Users need to practice English for roleplay. + +## Current Scenario Context: +{scenario_title}: {scenario_description} +Setting: {scenario_context} +Your role: {your_role} +Key vocabulary: {key_vocabulary} + +## Your Mission: +- Be a real, authentic person in your role ({your_role}) +- Have genuine conversations, not rehearsed interactions +- React naturally to what users say - show surprise, interest, concern +- Make the experience feel like talking to a real person +- ONLY engage in roleplay when user speaks English + +## Language Detection Protocol: +### If User Uses Non-English: +- **DO NOT respond in their language** +- **DO NOT continue roleplay** +- **IMMEDIATELY hand off to Guiding Agent** + +### If User Uses English: +- Continue with natural roleplay as described below + +## Natural Conversation Principles: + +### Be Authentically Human: +- Show genuine emotions and reactions +- Use conversational language, not formal scripts +- Respond to the unexpected - be flexible +- Have your own personality as the character +- Make small talk when appropriate + +### Stay True to Your Role: +- Embody the character realistically ({your_role}) +- Use language and knowledge appropriate to your role +- Have realistic limitations and expertise +- React as this person would in real situations + +### Create Real Interactions: +- Build genuine rapport with the user +- Remember details they share and reference them later +- Ask follow-up questions that show you're listening +- Share appropriate details about yourself/your work +- Handle awkward moments gracefully + +### Supportive Language Learning: +- If users make small errors, naturally model correct language without stopping the flow +- Focus on communication over perfection +- Encourage them by showing interest in what they're saying +- Make them feel comfortable making mistakes + +## Natural Handoff Triggers: +- **User speaks 
non-English** (immediate handoff) +- User seems genuinely lost or confused multiple times +- User explicitly asks for language help +- Communication completely breaks down +- User appears frustrated with language barriers + +## Conversation Flow: +- Start conversations naturally for the situation +- Let conversations develop organically +- Don't force vocabulary usage - let it emerge naturally +- End conversations naturally when appropriate + +Remember: +- You're not a teacher in roleplay mode - you're a real person doing your job who happens to be patient with English learners. Be genuine, warm, and human! +- Keep responses neither too long nor too short, and adapt to the user's communication style and needs. +""" + +guiding_prompt = """# Role: Guiding Agent - Supportive Language Helper & Language Router + +You are a patient, supportive language guide who helps users when they struggle with English communication. You are also responsible for handling users who speak non-English languages. + +## Current Scenario Context: +{scenario_title}: {scenario_description} +Key vocabulary for this scenario: {key_vocabulary} + +## Your Mission: +- Handle users who speak non-English languages and guide them to English +- Help users who are struggling with English communication +- Use simple, clear language and break things down step by step +- Provide examples and choices to make communication easier +- Build confidence and encourage attempts +- Prepare users to return to roleplay when they're ready + +## Language Routing Protocol: + +### When User Speaks Non-English Languages: +**Step 1: Polite Asking** +- Ask users about their current English level in their own language. If they are weak in English, encourage them and start teaching them. + +**Step 2: Teaching** +- If users do not know English, use their own language to teach English, and if they know basic English, use English +- Start with very basic English + +**Step 3: Begin English Teaching** +1. Ask them what they want to learn about related to the current topic. Give them some related suggestions to get feedback. +2. Teach them the English they need for the topic, so that they have the background knowledge to practice the role-play + + + +## Building from Zero: +1. **Basic greetings**: "Hello", "Hi", "Good morning" +2. **Essential phrases**: "Please", "Thank you", "Excuse me" +3. **Scenario basics**: Start with 2-3 key words for the situation +4. **Simple sentences**: "I want..." "I would like..." "Can I have..." + +## Guiding Principles: + +### Use Simple Language: +- Short, clear sentences +- Basic vocabulary (avoid complex words) +- One concept at a time +- Lots of examples and choices + +### Supportive Approach: +- Encourage every attempt: "Good try!" +- Be patient with mistakes +- Focus on communication, not perfection +- Celebrate small victories +- **Extra patience for non-native speakers**: "Don't worry, English is difficult. You're doing great!"
+ +### Step-by-Step Help: +- Break complex requests into smaller parts +- Give specific examples they can use +- Offer multiple choice options +- Model the language first +- **For beginners**: Start with single words, then phrases, then sentences + +### Scenario-Specific Guidance: +- Teach key vocabulary for the current scenario +- Explain common phrases used in this situation +- Practice basic interactions before complex ones +- Connect learning to the roleplay context + +## Teaching Techniques: + +### For Absolute Beginners: + +### Vocabulary Building: + + +### Phrase Practice: + + +### Choice-Based Learning: + + +### Error Correction: + + +## Building Confidence: +- "You're doing great! English is difficult, but you're learning fast." +- "Don't worry about mistakes. That's how we learn!" +- "Your English is getting better with each try." +- **For multilingual learners**: "Speaking multiple languages is amazing! English will be easier with practice." + +## Transition Back to Roleplay: +- "Great! You're ready to try the real conversation now." +- "Perfect! Let's go back to the restaurant and practice with the waiter." +- "You know the words now. Let's use them in a real situation!" + +## Signs to Return to Roleplay: +- User gives confident, complete responses IN ENGLISH +- User uses vocabulary correctly in context +- User asks to try the roleplay again +- User shows improved communication flow +- User can form basic sentences (5+ words) comfortably IN ENGLISH + + +Remember: +- Your job is to make English feel easier and help users gain confidence to communicate naturally in English! Always redirect non-English speakers to English practice. +- Be flexible in using the user's language or English, based on their confidence, to enhance the user experience. +- Don't make responses too long or complex, to avoid overwhelming the user; the goal is to build their confidence and skills gradually. +""" diff --git a/src/agents/role_play/scenarios.py b/src/agents/role_play/scenarios.py new file mode 100644 index 0000000000000000000000000000000000000000..b0f501c9256e08b4ba251e58a659d98fcecd64d1 --- /dev/null +++ b/src/agents/role_play/scenarios.py @@ -0,0 +1,195 @@ +SCENARIOS = [ + { + "id": "student_visa_interview", + "scenario_title": "Student Visa Interview", + "scenario_description": "Practice answering common visa interview questions about your study plans, university choice, finances, and post-graduation goals. You are a student applying for a student visa.
I am the visa interviewer.", + "scenario_context": "A formal visa interview office at an embassy or consulate", + "your_role": "student", + "key_vocabulary": [ + "pursue higher education", + "broaden my horizons", + "international exposure", + "cultural diversity", + "academic excellence", + "globally recognized degree", + "cutting-edge facilities", + "research opportunities", + "tuition fees", + "financial support", + "scholarship", + "sponsorship letter", + "post-graduation plans", + ], + "topics_covered": [ + "Study Plans", + "University Choice", + "Academic Capability", + "Financial Status", + "Post-Graduation Plans", + ], + "follow_up_questions": { + "Study Plans": [ + "Why did you decide to pursue higher education abroad instead of in your home country?", + "How will studying overseas help you broaden your horizons?", + "What challenges do you expect to face in a foreign academic environment?", + "How long is your course, and what is your study schedule like?", + "Have you researched the teaching methods used in your chosen program?", + ], + "University Choice": [ + "Why did you choose this university over others?", + "How do you think the academic excellence of this institution will benefit you?", + "Have you looked into the research opportunities your university offers?", + "How will the cultural diversity on campus influence your experience?", + "What do you know about the city where your university is located?", + ], + "Academic Capability": [ + "How does your academic background prepare you for this course?", + "Can you give an example of a project or subject you excelled at in the past?", + "How will you make use of the cutting-edge facilities provided by the university?", + "How confident are you in adapting to a different academic system?", + "What skills do you think will help you succeed in your studies abroad?", + ], + "Financial Status": [ + "How will you fund your tuition fees and living expenses?", + "Can you provide details about your financial support from family?", + "Do you have a scholarship or proof of funds?", + "How have you prepared financially for unexpected costs?", + "Can you show me your bank statement and sponsorship letter?", + ], + "Post-Graduation Plans": [ + "What will you do after you complete your degree?", + "How will your globally recognized degree help you in your career?", + "Do you plan to work overseas or return to your home country?", + "How will you apply the knowledge acquired during your studies?", + "What is your long-term goal after graduation?", + ], + }, + }, + { + "id": "ordering_at_restaurant", + "scenario_title": "Ordering At A Restaurant", + "scenario_description": "Practice ordering food, asking about the menu, and talking to a waiter. You are a customer at a restaurant. 
I am the waiter/waitress.", + "scenario_context": "A busy restaurant with various seating options and a diverse menu", + "your_role": "customer", + "key_vocabulary": [ + "table for two, please", + "do you have a reservation?", + "non-smoking section", + "window seat", + "outdoor seating", + "could we have the bill, please?", + "do you take credit cards?", + "signature dish", + "dietary restrictions", + "sauce served separately", + "steak cooked", + "pair with a drink", + "vegetarian options", + "receipt", + "cash or card", + ], + "topics_covered": [ + "Getting a Table", + "Asking About the Menu", + "Making a Food Order", + "Special Requests or Allergies", + "Paying the Bill", + ], + "follow_up_questions": { + "Getting a Table": [ + "Would you like to sit inside or outside?", + "How many people will be joining you today?", + "Do you have a reservation under your name?", + ], + "Asking About the Menu": [ + "Are you looking for something light or filling?", + "Would you like to try our signature dish?", + "Do you prefer meat, seafood, or vegetarian options?", + ], + "Making a Food Order": [ + "Would you like any sides with that?", + "How would you like your steak cooked?", + "Would you like to pair it with a drink?", + ], + "Special Requests or Allergies": [ + "Do you have any dietary restrictions?", + "Should we avoid using certain ingredients?", + "Would you like the sauce served separately?", + ], + "Paying the Bill": [ + "Would you like to pay together or separately?", + "Would you like the receipt?", + "Are you paying by cash or card?", + ], + }, + }, + { + "id": "going_shopping", + "scenario_title": "Going Shopping", + "scenario_description": "Practice asking about prices, sizes, and items while shopping. You are a customer looking for items in a store. 
I am the shop assistant.", + "scenario_context": "A retail store with various departments and products", + "your_role": "customer", + "key_vocabulary": [ + "excuse me, could you help me?", + "I'm looking for...", + "do you have this in stock?", + "could you show me where it is?", + "what size do you usually wear?", + "would you like to try it on?", + "does it fit comfortably?", + "specific price range", + "casual or formal", + "specific brand", + "different size", + "paying by cash or card", + "would you like a bag?", + "exchange it for another item", + "other options", + ], + "topics_covered": [ + "Asking for Help", + "Describing What You're Looking For", + "Asking About Price or Size", + "Trying or Testing Products", + "Making the Purchase or Returning", + ], + "follow_up_questions": { + "Asking for Help": [ + "How can I assist you today?", + "Are you looking for anything in particular?", + "Do you need help finding your size?", + ], + "Describing What You're Looking For": [ + "What color or style are you interested in?", + "Do you want something casual or formal?", + "Is there a specific brand you prefer?", + ], + "Asking About Price or Size": [ + "Are you looking for something in a specific price range?", + "What size do you usually wear?", + "Would you like to see other options in your size?", + ], + "Trying or Testing Products": [ + "Would you like to try it on?", + "Does it fit comfortably?", + "Do you need a different size?", + ], + "Making the Purchase or Returning": [ + "Will you be paying by cash or card?", + "Would you like a bag for that?", + "Would you like to exchange it for another item?", + ], + }, + }, +] + + +def get_scenarios(): + return SCENARIOS + + +def get_scenario_by_id(scenario_id: str): + for scenario in SCENARIOS: + if scenario["id"] == scenario_id: + return scenario + return None diff --git a/src/agents/role_play/tools.py b/src/agents/role_play/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..0e55fd962802611e69b1563885e65e244e30040f --- /dev/null +++ b/src/agents/role_play/tools.py @@ -0,0 +1,16 @@ +from langchain_core.tools import tool +from loguru import logger + + +@tool +def function_name( + input: str, +) -> str: + """ + Describe what this function does. + """ + logger.info(f"Received input: {input}") + # Perform the necessary operations on the input + result = f"Processed: {input}" + logger.info(f"Returning result: {result}") + return result diff --git a/src/agents/tools/__pycache__/crawl.cpython-311.pyc b/src/agents/tools/__pycache__/crawl.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d4145c0bab5a4d0ada24be12ecb7f365199fd779 Binary files /dev/null and b/src/agents/tools/__pycache__/crawl.cpython-311.pyc differ diff --git a/src/agents/tools/crawl.py b/src/agents/tools/crawl.py new file mode 100644 index 0000000000000000000000000000000000000000..0ae7561b2f5768d89b99248a52aa5667310e9409 --- /dev/null +++ b/src/agents/tools/crawl.py @@ -0,0 +1,24 @@ +# Copyright (c) 2025 Bytedance Ltd.
and/or its affiliates +# SPDX-License-Identifier: MIT + +from typing import Annotated + +from langchain_core.tools import tool + +from src.config.crawler.crawler import Crawler +from loguru import logger + + +@tool +def crawl_tool( + url: Annotated[str, "The url to crawl."], +) -> str: + """Use this to crawl a url and get a readable content in markdown format.""" + try: + crawler = Crawler() + article = crawler.crawl(url) + return {"url": url, "crawled_content": article.to_markdown()[:1000]} + except BaseException as e: + error_msg = f"Failed to crawl. Error: {repr(e)}" + logger.error(error_msg) + return error_msg diff --git a/src/apis/.DS_Store b/src/apis/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..0826263b022ca888213a2b2e7c45c990e6d81467 Binary files /dev/null and b/src/apis/.DS_Store differ diff --git a/src/apis/__pycache__/__init__.cpython-311.pyc b/src/apis/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5bd667d13f88b0fbb418859c1e6f8a3fc0542afe Binary files /dev/null and b/src/apis/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/apis/__pycache__/create_app.cpython-311.pyc b/src/apis/__pycache__/create_app.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6459f129bd258cbc95e67e43b39fa60ba83139e6 Binary files /dev/null and b/src/apis/__pycache__/create_app.cpython-311.pyc differ diff --git a/src/apis/config/__pycache__/firebase_config.cpython-311.pyc b/src/apis/config/__pycache__/firebase_config.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0b0da0371123b7f8c858f4ae97beb493666e4ff4 Binary files /dev/null and b/src/apis/config/__pycache__/firebase_config.cpython-311.pyc differ diff --git a/src/apis/create_app.py b/src/apis/create_app.py new file mode 100644 index 0000000000000000000000000000000000000000..44fbb834561846cd38c236400715c8efd7cc49f2 --- /dev/null +++ b/src/apis/create_app.py @@ -0,0 +1,22 @@ +from fastapi import FastAPI, APIRouter +from fastapi.middleware.cors import CORSMiddleware +from src.apis.routes.user_route import router as router_user +from src.apis.routes.chat_route import router as router_chat + +api_router = APIRouter(prefix="/api") +api_router.include_router(router_user) +api_router.include_router(router_chat) + + +def create_app(): + app = FastAPI(docs_url="/", title="API") + + app.add_middleware( + CORSMiddleware, + allow_origins=["*"], + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + ) + + return app diff --git a/src/apis/interfaces/.DS_Store b/src/apis/interfaces/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..def12a9e1644794cbb6f657907cbae2eac696151 Binary files /dev/null and b/src/apis/interfaces/.DS_Store differ diff --git a/src/apis/interfaces/__pycache__/api_interface.cpython-311.pyc b/src/apis/interfaces/__pycache__/api_interface.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..99abda5cc22b1814b46a084c883253f42d4f037b Binary files /dev/null and b/src/apis/interfaces/__pycache__/api_interface.cpython-311.pyc differ diff --git a/src/apis/interfaces/__pycache__/auth_interface.cpython-311.pyc b/src/apis/interfaces/__pycache__/auth_interface.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..39b71f70b418ce10d87ae23f4deb0598fe63e8d7 Binary files /dev/null and b/src/apis/interfaces/__pycache__/auth_interface.cpython-311.pyc differ diff --git 
a/src/apis/interfaces/api_interface.py b/src/apis/interfaces/api_interface.py new file mode 100644 index 0000000000000000000000000000000000000000..fc60dfeb1b4c614e4dd6bd13181ad2ec9fcce876 --- /dev/null +++ b/src/apis/interfaces/api_interface.py @@ -0,0 +1,5 @@ +from pydantic import BaseModel, Field +from typing import Optional +from src.apis.models.BaseModel import BaseDocument +from typing import List, Union + diff --git a/src/apis/interfaces/auth_interface.py b/src/apis/interfaces/auth_interface.py new file mode 100644 index 0000000000000000000000000000000000000000..fdbe5d364437f07767acaf0518d59d113b929e0d --- /dev/null +++ b/src/apis/interfaces/auth_interface.py @@ -0,0 +1,2 @@ +from pydantic import BaseModel, Field + diff --git a/src/apis/middlewares/__pycache__/auth_middleware.cpython-311.pyc b/src/apis/middlewares/__pycache__/auth_middleware.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..af1e0aad57ec717af754591eb1c5d7b7720688be Binary files /dev/null and b/src/apis/middlewares/__pycache__/auth_middleware.cpython-311.pyc differ diff --git a/src/apis/middlewares/auth_middleware.py b/src/apis/middlewares/auth_middleware.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/src/apis/models/.DS_Store b/src/apis/models/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..9a620b719fb9ba0c899b07b351695bf0c8c0ae6e Binary files /dev/null and b/src/apis/models/.DS_Store differ diff --git a/src/apis/models/BaseModel.py b/src/apis/models/BaseModel.py new file mode 100644 index 0000000000000000000000000000000000000000..2a493e26823f200faac120086bf43c3ad792f76d --- /dev/null +++ b/src/apis/models/BaseModel.py @@ -0,0 +1,17 @@ +from pydantic import BaseModel, Field +from typing import Optional +from datetime import datetime +from src.utils.logger import get_date_time + + +class BaseDocument(BaseModel): + created_at: Optional[datetime] = Field( + default_factory=lambda: get_date_time().replace(tzinfo=None) + ) + updated_at: Optional[datetime] = Field( + default_factory=lambda: get_date_time().replace(tzinfo=None) + ) + expire_at: Optional[datetime] = None + + class Config: + arbitrary_types_allowed = True diff --git a/src/apis/models/__pycache__/BaseModel.cpython-311.pyc b/src/apis/models/__pycache__/BaseModel.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f97a2d29ee7d91a7309f9ab06f14bd5f63a53fd2 Binary files /dev/null and b/src/apis/models/__pycache__/BaseModel.cpython-311.pyc differ diff --git a/src/apis/models/__pycache__/destination_models.cpython-311.pyc b/src/apis/models/__pycache__/destination_models.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3d0eaef51649f1e2b755347f552aef34cd9e6d82 Binary files /dev/null and b/src/apis/models/__pycache__/destination_models.cpython-311.pyc differ diff --git a/src/apis/models/__pycache__/hotel_models.cpython-311.pyc b/src/apis/models/__pycache__/hotel_models.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..68db065dbb4398aebe5b89eedc336f7a6c15a10e Binary files /dev/null and b/src/apis/models/__pycache__/hotel_models.cpython-311.pyc differ diff --git a/src/apis/models/__pycache__/post_models.cpython-311.pyc b/src/apis/models/__pycache__/post_models.cpython-311.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fe0429f8ecd5590c391a02fbb26d0822d2dfa242 Binary files /dev/null and 
diff --git a/src/apis/models/__pycache__/BaseModel.cpython-311.pyc b/src/apis/models/__pycache__/BaseModel.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f97a2d29ee7d91a7309f9ab06f14bd5f63a53fd2
Binary files /dev/null and b/src/apis/models/__pycache__/BaseModel.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/destination_models.cpython-311.pyc b/src/apis/models/__pycache__/destination_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..3d0eaef51649f1e2b755347f552aef34cd9e6d82
Binary files /dev/null and b/src/apis/models/__pycache__/destination_models.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/hotel_models.cpython-311.pyc b/src/apis/models/__pycache__/hotel_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..68db065dbb4398aebe5b89eedc336f7a6c15a10e
Binary files /dev/null and b/src/apis/models/__pycache__/hotel_models.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/post_models.cpython-311.pyc b/src/apis/models/__pycache__/post_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..fe0429f8ecd5590c391a02fbb26d0822d2dfa242
Binary files /dev/null and b/src/apis/models/__pycache__/post_models.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/schedule_models.cpython-311.pyc b/src/apis/models/__pycache__/schedule_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..886ac11f84dfcfc9e301908f1ceb52144f235ac2
Binary files /dev/null and b/src/apis/models/__pycache__/schedule_models.cpython-311.pyc differ
diff --git a/src/apis/models/__pycache__/user_models.cpython-311.pyc b/src/apis/models/__pycache__/user_models.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..098b8979a7818053c7bababe14648b03e0a1949f
Binary files /dev/null and b/src/apis/models/__pycache__/user_models.cpython-311.pyc differ
diff --git a/src/apis/models/user_models.py b/src/apis/models/user_models.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/apis/providers/.DS_Store b/src/apis/providers/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..f512b623c524c7d6b402854b24937b3fa883a4f7
Binary files /dev/null and b/src/apis/providers/.DS_Store differ
diff --git a/src/apis/providers/__pycache__/__init__.cpython-311.pyc b/src/apis/providers/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b8b1ed88f2c96a0c8c4ed0030c7676a6f7bfdf9a
Binary files /dev/null and b/src/apis/providers/__pycache__/__init__.cpython-311.pyc differ
diff --git a/src/apis/providers/__pycache__/firebase_provider.cpython-311.pyc b/src/apis/providers/__pycache__/firebase_provider.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..7cc3a1c5ca42e61128b431f222de85ac7c969e93
Binary files /dev/null and b/src/apis/providers/__pycache__/firebase_provider.cpython-311.pyc differ
diff --git a/src/apis/providers/__pycache__/jwt_provider.cpython-311.pyc b/src/apis/providers/__pycache__/jwt_provider.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d2a2f2f39816284881762bcac97c3f35286b190d
Binary files /dev/null and b/src/apis/providers/__pycache__/jwt_provider.cpython-311.pyc differ
diff --git a/src/apis/routes/.DS_Store b/src/apis/routes/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..07643a697dd53e007a50d2c3afd09b9a570ea41f
Binary files /dev/null and b/src/apis/routes/.DS_Store differ
diff --git a/src/apis/routes/__pycache__/admin_route.cpython-311.pyc b/src/apis/routes/__pycache__/admin_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..122fcf4dea04a656ca424a8c3d37cf0025f6b1c5
Binary files /dev/null and b/src/apis/routes/__pycache__/admin_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/alert_zone_route.cpython-311.pyc b/src/apis/routes/__pycache__/alert_zone_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..058f4e6edf7e893e51cda2a580f22909e85d4f95
Binary files /dev/null and b/src/apis/routes/__pycache__/alert_zone_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/auth_route.cpython-311.pyc b/src/apis/routes/__pycache__/auth_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..d87db2aecb2ddf22e51e417e146cfeb5fc3e0e8d
Binary files /dev/null and b/src/apis/routes/__pycache__/auth_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/chat_route.cpython-311.pyc b/src/apis/routes/__pycache__/chat_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..e1cef5f25fe39310c3b9d6cca2014ee144854814
Binary files /dev/null and b/src/apis/routes/__pycache__/chat_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/comment_route.cpython-311.pyc b/src/apis/routes/__pycache__/comment_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..ac7ecb50db41430c4bd11edfb960b1ad8775d494
Binary files /dev/null and b/src/apis/routes/__pycache__/comment_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/hotel_route.cpython-311.pyc b/src/apis/routes/__pycache__/hotel_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..11e130f5a5c66e5a35d6a905a80dc419aefd2c07
Binary files /dev/null and b/src/apis/routes/__pycache__/hotel_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/inference_route.cpython-311.pyc b/src/apis/routes/__pycache__/inference_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..76b19a70df82a14eef9a3b687fd6703e3d62a5d4
Binary files /dev/null and b/src/apis/routes/__pycache__/inference_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/location_route.cpython-311.pyc b/src/apis/routes/__pycache__/location_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..41c6f4a14785092e5ff26b5f76cd1c1905853abf
Binary files /dev/null and b/src/apis/routes/__pycache__/location_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/planner_route.cpython-311.pyc b/src/apis/routes/__pycache__/planner_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..65d940ebdc7ef7f232c0fc315be06145aaf44abf
Binary files /dev/null and b/src/apis/routes/__pycache__/planner_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/post_router.cpython-311.pyc b/src/apis/routes/__pycache__/post_router.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1a6c3ca6f1f41fb53269ad7281dd5caabe45b478
Binary files /dev/null and b/src/apis/routes/__pycache__/post_router.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/reaction_route.cpython-311.pyc b/src/apis/routes/__pycache__/reaction_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..b05d1461e5d8bfbd5a29cedfb54333c080d6520b
Binary files /dev/null and b/src/apis/routes/__pycache__/reaction_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/scheduling_router.cpython-311.pyc b/src/apis/routes/__pycache__/scheduling_router.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..9391a54ddb115e90758bdd12318f363430be226e
Binary files /dev/null and b/src/apis/routes/__pycache__/scheduling_router.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/travel_dest_route.cpython-311.pyc b/src/apis/routes/__pycache__/travel_dest_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..80b125e1da4450aae1163ffb5c0d7850ab614da5
Binary files /dev/null and b/src/apis/routes/__pycache__/travel_dest_route.cpython-311.pyc differ
diff --git a/src/apis/routes/__pycache__/user_route.cpython-311.pyc b/src/apis/routes/__pycache__/user_route.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..a3f4b7e6c7c1b47529e5a42e8bae58e958e735de
Binary files /dev/null and b/src/apis/routes/__pycache__/user_route.cpython-311.pyc differ
diff --git a/src/apis/routes/chat_route.py b/src/apis/routes/chat_route.py
new file mode 100644
index 0000000000000000000000000000000000000000..555e7860bacfd889f54e46bd6e75cfca132bb567
--- /dev/null
+++ b/src/apis/routes/chat_route.py
@@ -0,0 +1,38 @@
+from fastapi import APIRouter, status, Depends, BackgroundTasks, HTTPException
+from fastapi.responses import JSONResponse
+from src.utils.logger import logger
+from src.agents.role_play.func import create_agents
+from pydantic import BaseModel, Field
+from typing import List, Dict, Any
+from src.agents.role_play.scenarios import get_scenarios, get_scenario_by_id
+
+router = APIRouter(prefix="/ai", tags=["AI"])
+
+
+class RoleplayRequest(BaseModel):
+    query: str = Field(..., description="User's query for the AI agent")
+    session_id: str = Field(
+        ..., description="Session ID for tracking user interactions"
+    )
+    scenario: Dict[str, Any] = Field(..., description="The scenario for the roleplay")
+
+
+@router.get("/scenarios", status_code=status.HTTP_200_OK)
+async def list_scenarios():
+    return JSONResponse(content=get_scenarios())
+
+
+@router.post("/roleplay", status_code=status.HTTP_200_OK)
+async def roleplay(request: RoleplayRequest):
+    scenario = request.scenario
+    if not scenario:
+        raise HTTPException(status_code=400, detail="Scenario not provided")
+    logger.debug(f"Roleplay scenario for session {request.session_id}: {scenario}")
+    response = await create_agents(scenario).ainvoke(
+        {
+            "messages": [request.query],
+        },
+        {"configurable": {"thread_id": request.session_id}},
+    )
+
+    return JSONResponse(content=response["messages"][-1].content)
diff --git a/src/apis/routes/user_route.py b/src/apis/routes/user_route.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2dee292b372cdb04682138422eebf3c2dbf1f0c
--- /dev/null
+++ b/src/apis/routes/user_route.py
@@ -0,0 +1,7 @@
+from fastapi import APIRouter, status, Depends, BackgroundTasks
+from fastapi.responses import JSONResponse
+from src.utils.logger import logger
+from src.utils.redis import set_key_redis, delete_key_redis
+
+
+router = APIRouter(prefix="/user", tags=["User"])
\ No newline at end of file
diff --git a/src/config/__pycache__/llm.cpython-311.pyc b/src/config/__pycache__/llm.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..0d74e310a1cff2b8a649a550e5427009f04cfa58
Binary files /dev/null and b/src/config/__pycache__/llm.cpython-311.pyc differ
diff --git a/src/config/llm.py b/src/config/llm.py
new file mode 100644
index 0000000000000000000000000000000000000000..80dec785a05be26940c76c0059c3aa6468b24c2c
--- /dev/null
+++ b/src/config/llm.py
@@ -0,0 +1,13 @@
+from dotenv import load_dotenv
+load_dotenv()
+from langchain_google_genai import ChatGoogleGenerativeAI
+
+
+# Initialize model
+model = ChatGoogleGenerativeAI(
+    model="gemini-2.5-flash",
+    temperature=0.7,
+    max_tokens=None,
+    timeout=None,
+    max_retries=2,
+)
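For reference, a sketch of calling the roleplay endpoint once the api_router is mounted under /api (the host/port, the scenario keys, and the httpx dependency are assumptions, not part of this diff):

    import asyncio

    import httpx


    async def demo_roleplay() -> None:
        payload = {
            "query": "Hello, can we start the roleplay?",
            "session_id": "demo-session-1",
            # scenario is an open Dict[str, Any]; these keys are illustrative only
            "scenario": {"id": "job_interview", "description": "Practice a job interview"},
        }
        async with httpx.AsyncClient(base_url="http://localhost:8000") as client:
            resp = await client.post("/api/ai/roleplay", json=payload)
            resp.raise_for_status()
            print(resp.json())


    if __name__ == "__main__":
        asyncio.run(demo_roleplay())
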
diff --git a/src/utils/.DS_Store b/src/utils/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..624b662062226fe9bd478ddb578537145f81145b
Binary files /dev/null and b/src/utils/.DS_Store differ
diff --git a/src/utils/__pycache__/helper.cpython-311.pyc b/src/utils/__pycache__/helper.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..5f686c7f76ec771d55bbf2b8534212f7474b4eeb
Binary files /dev/null and b/src/utils/__pycache__/helper.cpython-311.pyc differ
diff --git a/src/utils/__pycache__/logger.cpython-311.pyc b/src/utils/__pycache__/logger.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..1f982f50db676e62019becb67f39ca3fe35e4155
Binary files /dev/null and b/src/utils/__pycache__/logger.cpython-311.pyc differ
diff --git a/src/utils/__pycache__/mongo.cpython-311.pyc b/src/utils/__pycache__/mongo.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..f0bc43a6f451764a644656ef3a4b7936b2ba3bee
Binary files /dev/null and b/src/utils/__pycache__/mongo.cpython-311.pyc differ
diff --git a/src/utils/__pycache__/redis.cpython-311.pyc b/src/utils/__pycache__/redis.cpython-311.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..2ac3bfd2762c2f6999a308e116cfd9e53df25a04
Binary files /dev/null and b/src/utils/__pycache__/redis.cpython-311.pyc differ
diff --git a/src/utils/logger.py b/src/utils/logger.py
new file mode 100644
index 0000000000000000000000000000000000000000..471c7f2b622e98938d3a671ab52e331f06f6a2f5
--- /dev/null
+++ b/src/utils/logger.py
@@ -0,0 +1,65 @@
+import logging
+import os
+from datetime import datetime
+from pathlib import Path
+
+import pytz
+
+
+class CoreCFG:
+    PROJECT_NAME = "BE"
+    BOT_NAME = "BE"
+
+
+def get_date_time():
+    return datetime.now(pytz.timezone("Asia/Ho_Chi_Minh"))
+
+
+DATE_TIME = get_date_time().date()
+BASE_DIR = os.path.dirname(Path(__file__).parent.parent)
+LOG_DIR = os.path.join(BASE_DIR, "logs")
+
+
+class CustomFormatter(logging.Formatter):
+    green = "\x1b[0;32m"
+    grey = "\x1b[38;5;248m"
+    yellow = "\x1b[38;5;229m"
+    red = "\x1b[31;20m"
+    bold_red = "\x1b[31;1m"
+    blue = "\x1b[38;5;31m"
+    white = "\x1b[38;5;255m"
+    reset = "\x1b[38;5;15m"
+
+    base_format = f"{grey}%(asctime)s | %(name)s | %(threadName)s | {{level_color}}%(levelname)-8s{grey} | {blue}%(module)s:%(lineno)d{grey} - {white}%(message)s"
+
+    FORMATS = {
+        logging.INFO: base_format.format(level_color=green),
+        logging.WARNING: base_format.format(level_color=yellow),
+        logging.ERROR: base_format.format(level_color=red),
+        logging.CRITICAL: base_format.format(level_color=bold_red),
+    }
+
+    def format(self, record):
+        log_fmt = self.FORMATS.get(record.levelno, self.base_format.format(level_color=self.grey))
+        formatter = logging.Formatter(log_fmt)
+        return formatter.format(record)
+
+
+def custom_logger(app_name="APP"):
+    logger_r = logging.getLogger(name=app_name)
+    # Set the timezone to Ho_Chi_Minh
+    tz = pytz.timezone("Asia/Ho_Chi_Minh")
+
+    logging.Formatter.converter = lambda *args: datetime.now(tz).timetuple()
+
+    ch = logging.StreamHandler()
+    ch.setLevel(logging.INFO)
+    ch.setFormatter(CustomFormatter())
+
+    logger_r.setLevel(logging.INFO)
+    logger_r.addHandler(ch)
+
+    return logger_r
+
+
+logger = custom_logger(app_name=CoreCFG.PROJECT_NAME)
diff --git a/src/utils/redis.py b/src/utils/redis.py
new file mode 100644
index 0000000000000000000000000000000000000000..8759f5fd44ee1f5c5bdaf5228b4544b06d383be1
--- /dev/null
+++ b/src/utils/redis.py
@@ -0,0 +1,18 @@
+import redis.asyncio as redis
+
+# redis_client = redis.from_url(
+#     RedisCfg.REDIS_URL, encoding="utf-8", decode_responses=True
+# )
+
+
+async def set_key_redis(key, value, time=300):
+    return None  # Redis disabled for now: redis_client.set(key, value, time)
+
+
+async def get_key_redis(key):
+    # Redis disabled for now; this stub always returns None
+    return None  # await redis_client.get(key)
+
+
+async def delete_key_redis(key):
+    return None  # Redis disabled for now: redis_client.delete(key)
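If the Redis helpers in src/utils/redis.py are re-enabled later, a minimal sketch could look like the following (the REDIS_URL environment variable is an assumption; RedisCfg is referenced in the commented-out code but not defined anywhere in this diff):

    import os

    import redis.asyncio as redis

    # Assumed configuration source; swap in the project's real settings object.
    redis_client = redis.from_url(
        os.getenv("REDIS_URL", "redis://localhost:6379/0"),
        encoding="utf-8",
        decode_responses=True,
    )


    async def set_key_redis(key, value, time=300):
        # Store the value with a TTL in seconds, mirroring the stubbed signature.
        return await redis_client.set(key, value, ex=time)


    async def get_key_redis(key):
        return await redis_client.get(key)


    async def delete_key_redis(key):
        return await redis_client.delete(key)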