# Source: website/test_gemini_integration.py
# Author: Tim Luka Horstmann
# Commit a9456f8 — "Changed gemini system prompt integration"
#!/usr/bin/env python3
"""
Test script for Gemini API integration
"""
import os
import asyncio
from datetime import datetime
# Mock the dependencies for testing
class MockClient:
    """Offline stand-in for the google-genai client object."""

    def __init__(self, api_key):
        # Kept so tests can inspect which key the client was created with.
        self.api_key = api_key

    class models:
        """Mimics the real client's ``models`` namespace attribute."""

        @staticmethod
        def generate_content_stream(model, contents, config):
            """Yield one canned chunk, imitating a streaming API response."""

            class _Chunk:
                # Fixed reply text; mirrors the shape of a real stream chunk.
                text = "Hello! I'm Tim Luka Horstmann, a Computer Scientist currently pursuing my MSc in Data and AI at Institut Polytechnique de Paris."

            yield _Chunk()
class MockTypes:
    """Minimal substitute for the ``google.genai.types`` module surface."""

    class Content:
        """A conversation turn: a role plus its list of parts."""

        def __init__(self, role, parts):
            self.role = role
            self.parts = parts

    class Part:
        """One text fragment of a message."""

        def __init__(self, text):
            self.text = text

        @classmethod
        def from_text(cls, text):
            """Alternate constructor matching the real SDK helper."""
            return cls(text)

    class GenerateContentConfig:
        """Holds the generation parameters passed to the mock client."""

        def __init__(self, temperature, top_p, max_output_tokens):
            self.temperature = temperature
            self.top_p = top_p
            self.max_output_tokens = max_output_tokens
# Test function similar to our Gemini implementation
async def test_gemini_integration():
    """Simulate the production Gemini call path using the mock client/types.

    Builds the system prompt, assembles the message list the way the real
    integration does (Gemini has no system role, so the instructions are
    embedded in the first user turn), then drives the mocked streaming
    response to completion.

    Returns:
        bool: True when the mocked flow completes without raising.
    """
    # Mock environment variables
    USE_GEMINI = True
    gemini_api_key = "test_api_key"
    gemini_model = "gemini-2.5-flash-preview-05-20"

    # Mock full CV text
    full_cv_text = "Tim Luka Horstmann is a Computer Scientist pursuing MSc in Data and AI at Institut Polytechnique de Paris."

    # Initialize mock client
    gemini_client = MockClient(api_key=gemini_api_key)
    types = MockTypes()

    # Test query and history
    query = "What is your education?"
    history = []

    # Fixed F541: these literals had f-prefixes with no placeholders.
    print("Testing Gemini integration...")
    print(f"USE_GEMINI: {USE_GEMINI}")
    print(f"Query: {query}")

    # Simulate the Gemini function logic
    current_date = datetime.now().strftime("%Y-%m-%d")
    system_prompt = (
        "You are Tim Luka Horstmann, a Computer Scientist. A user is asking you a question. Respond as yourself, using the first person, in a friendly and concise manner. "
        "For questions about your CV, base your answer *exclusively* on the provided CV information below and do not add any details not explicitly stated. "
        "For casual questions not covered by the CV, respond naturally but limit answers to general truths about yourself (e.g., your current location is Paris, France, or your field is AI) "
        "and say 'I don't have specific details to share about that' if pressed for specifics beyond the CV or FAQs. Do not invent facts, experiences, or opinions not supported by the CV or FAQs. "
        f"Today's date is {current_date}. "
        f"CV: {full_cv_text}"
    )

    # Build messages for Gemini (no system role - embed instructions in first user message)
    messages = []

    # Add conversation history; Gemini expects "model" for assistant turns.
    for msg in history:
        role = "user" if msg["role"] == "user" else "model"
        messages.append(types.Content(role=role, parts=[types.Part.from_text(text=msg["content"])]))

    # Add current query; only the first turn of a conversation carries the
    # system prompt.
    if not history:
        combined_query = f"{system_prompt}\n\nUser question: {query}"
    else:
        combined_query = query
    messages.append(types.Content(role="user", parts=[types.Part.from_text(text=combined_query)]))

    print(f"System prompt length: {len(system_prompt)}")
    print(f"Number of messages: {len(messages)}")

    # Mock the streaming response
    response = gemini_client.models.generate_content_stream(
        model=gemini_model,
        contents=messages,
        config=types.GenerateContentConfig(
            temperature=0.3,
            top_p=0.7,
            max_output_tokens=512,
        ),
    )

    print("Streaming response:")
    for chunk in response:
        if chunk.text:
            print(f"Chunk: {chunk.text}")

    print("✅ Gemini integration test completed successfully!")
    return True
if __name__ == "__main__":
    # Run the async integration test end-to-end when invoked as a script.
    asyncio.run(test_gemini_integration())