from langchain_groq import ChatGroq
import traceback
import json

# Initialize LLM
try:
    llm = ChatGroq(
        temperature=0,
        groq_api_key="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",
        model_name="llama-3.2-90b-vision-preview"
    )
except Exception as e:
    print(f"Error initializing LLM: {e}")
    traceback.print_exc()
    raise
# Prompt template for the code review request
prompt = (
    "You will be given code as input for a code review: {code}. "
    "Use everything you know to provide the best support possible. "
    "Strive for a complete and accurate response, with a detailed, "
    "informative review of each line of the code. Ensure the answer is "
    "complete and maintain a helpful and friendly tone throughout."
)
# Generate a code review for the given source code
def generate(escaped_code):
    try:
        # Fill the prompt template with the code before invoking the model
        result = llm.invoke(prompt.format(code=escaped_code))
        return result.content
    except Exception as e:
        print(f"Error during generation of Code Review: {e}")
        traceback.print_exc()
        raise
# Read the contents of the file that should be reviewed
def loadContext(file_path: str) -> str:
    try:
        with open(file_path, 'r', encoding='utf-8') as file:
            content = file.read()
        return content
    except Exception as e:
        raise Exception(f"Failed to read the file at {file_path}: {str(e)}")