{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "9ab6f493-026f-4950-b244-22c5251b8daa",
   "metadata": {},
   "source": [
    "# Mediterranean Banter"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4568bc63-679e-4ea1-a9c9-b85dfc386ec7",
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "from dotenv import load_dotenv\n",
    "from openai import OpenAI\n",
    "import anthropic\n",
    "from IPython.display import Markdown, display, update_display"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7a852bee-76e6-4538-89a3-0702c2d5f05c",
   "metadata": {},
   "outputs": [],
   "source": [
    "import google.generativeai"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c843ac74-4797-4bd0-bed2-dfcaa2f98c41",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Load keys from .env (override=True refreshes values already present in the environment)\n",
    "load_dotenv(override=True)\n",
    "openai_api_key = os.getenv('OPENAI_API_KEY')\n",
    "anthropic_api_key = os.getenv('ANTHROPIC_API_KEY')\n",
    "google_api_key = os.getenv('GOOGLE_API_KEY')\n",
    "\n",
    "# Print only a short prefix of each key, never the full secret\n",
    "if openai_api_key:\n",
    "    print(f\"OpenAI API Key exists and begins {openai_api_key[:8]}\")\n",
    "else:\n",
    "    print(\"OpenAI API Key not set\")\n",
    "    \n",
    "if anthropic_api_key:\n",
    "    print(f\"Anthropic API Key exists and begins {anthropic_api_key[:7]}\")\n",
    "else:\n",
    "    print(\"Anthropic API Key not set\")\n",
    "\n",
    "if google_api_key:\n",
    "    print(f\"Google API Key exists and begins {google_api_key[:8]}\")\n",
    "else:\n",
    "    print(\"Google API Key not set\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e4181018-1818-47fa-a3fb-554627a63f69",
   "metadata": {},
   "outputs": [],
   "source": [
    "# One client per provider. OpenAI() and anthropic.Anthropic() pick up\n",
    "# OPENAI_API_KEY / ANTHROPIC_API_KEY from the environment loaded above.\n",
    "openai = OpenAI()\n",
    "\n",
    "claude = anthropic.Anthropic()\n",
    "\n",
    "# Pass the key explicitly so a missing GOOGLE_API_KEY fails here, at setup,\n",
    "# rather than on the first generate_content() call.\n",
    "google.generativeai.configure(api_key=google_api_key)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "19f833c8-e183-469c-a8c8-1c014889a15d",
   "metadata": {},
   "outputs": [],
   "source": [
    "# One model per persona: GPT argues for Spain, Claude for Italy, Gemini for France.\n",
    "gpt_model_spain = \"gpt-4.1-mini\"\n",
    "claude_model_italy = \"claude-3-5-haiku-latest\"\n",
    "gemini_model_france = \"gemini-2.0-flash\"\n",
    "\n",
    "gpt_system_spain = \"You are a chatbot who is very argumentative; \\\n",
    "you disagree with anything other than how beautiful and great South of Spain is.\"\n",
    "\n",
    "claude_system_italy = \"You are a very polite, courteous chatbot but you favour south of Italy, praising the Amalfi Coast.\"\n",
    "\n",
    "gemini_system_france = \"You are neutral. You always accept what other people say and never try to put down other people's thoughts. Although, you politely try to include that the South of France is beautiful.\"\n",
    "\n",
    "# Seed one opening message per speaker so every call_* has a history to build on.\n",
    "gpt_messages_spain = [\"Hi there, nothing beats the beauty of Spain and its wonderful beaches.\"]\n",
    "claude_messages_italy = [\"I agree. I admire the Southern Part of Spain but it's not as pretty as the Amalfi Coast.\"]\n",
    "gemini_messages_france = [\"Well, both are good and so is the French Riviera.\"]"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "2d426b95-5e7c-49aa-a5a1-9613296eb0d0",
   "metadata": {},
   "outputs": [],
   "source": [
    "def call_gpt():\n",
    "    \"\"\"Ask GPT (the Spain advocate) for its next reply, given the full history.\"\"\"\n",
    "    # GPT's own turns are \"assistant\"; Claude's and Gemini's turns are \"user\".\n",
    "    messages = [{\"role\": \"system\", \"content\": gpt_system_spain}]\n",
    "    history = zip(gpt_messages_spain, claude_messages_italy, gemini_messages_france)\n",
    "    for spain_turn, italy_turn, france_turn in history:\n",
    "        messages.extend([\n",
    "            {\"role\": \"assistant\", \"content\": spain_turn},\n",
    "            {\"role\": \"user\", \"content\": italy_turn},\n",
    "            {\"role\": \"user\", \"content\": france_turn},\n",
    "        ])\n",
    "    completion = openai.chat.completions.create(\n",
    "        model=gpt_model_spain,\n",
    "        messages=messages\n",
    "    )\n",
    "    return completion.choices[0].message.content"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "3fc9a696-3145-4f37-873b-539647f2fc0b",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Smoke test: one GPT turn from the seeded history\n",
    "call_gpt()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "63910faa-a122-4261-82a0-7530c6c5749a",
   "metadata": {},
   "outputs": [],
   "source": [
    "def call_claude():\n",
    "    \"\"\"Ask Claude (the Italy advocate) for its next reply, given the full history.\n",
    "\n",
    "    Claude's own turns are \"assistant\"; GPT's and Gemini's turns are \"user\".\n",
    "    Each round runs gpt -> claude -> gemini, so the history is rebuilt in that order.\n",
    "    \"\"\"\n",
    "    messages = []\n",
    "    for gpt_spain, claude_italy, gemini_france in zip(gpt_messages_spain, claude_messages_italy, gemini_messages_france):\n",
    "        messages.append({\"role\": \"user\", \"content\": gpt_spain})\n",
    "        messages.append({\"role\": \"assistant\", \"content\": claude_italy})\n",
    "        messages.append({\"role\": \"user\", \"content\": gemini_france})\n",
    "    # GPT has already spoken this round, so it holds the one message the zip above\n",
    "    # could not pair. (The previous code also re-appended the last Gemini message,\n",
    "    # duplicating it in the history.)\n",
    "    messages.append({\"role\": \"user\", \"content\": gpt_messages_spain[-1]})\n",
    "    message = claude.messages.create(\n",
    "        model=claude_model_italy,\n",
    "        system=claude_system_italy,\n",
    "        messages=messages,\n",
    "        max_tokens=500\n",
    "    )\n",
    "    return message.content[0].text"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "d3ab6aa2-a462-4fb3-bb6a-dc6b971827fa",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Smoke test: one Claude turn from the current history\n",
    "call_claude()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "114cb7eb-0915-46ac-b285-e40acf4a9ffb",
   "metadata": {},
   "outputs": [],
   "source": [
    "def call_gemini():\n",
    "    \"\"\"Ask Gemini (the France advocate) for its next reply, given the full history.\"\"\"\n",
    "    # Gemini's own turns are \"assistant\"; GPT's and Claude's turns are \"user\".\n",
    "    messages = []\n",
    "    for gpt_spain, claude_italy, gemini_france in zip(gpt_messages_spain, claude_messages_italy, gemini_messages_france):\n",
    "        messages.append({\"role\": \"user\", \"content\": gpt_spain})\n",
    "        messages.append({\"role\": \"user\", \"content\": claude_italy})\n",
    "        messages.append({\"role\": \"assistant\", \"content\": gemini_france})\n",
    "    # GPT and Claude have both spoken this round, so each holds one unpaired message.\n",
    "    messages.append({\"role\": \"user\", \"content\": gpt_messages_spain[-1]})\n",
    "    messages.append({\"role\": \"user\", \"content\": claude_messages_italy[-1]})\n",
    "    # Use the configured constant rather than re-hardcoding the model name.\n",
    "    gemini = google.generativeai.GenerativeModel(\n",
    "        model_name=gemini_model_france,\n",
    "        system_instruction=gemini_system_france\n",
    "    )\n",
    "    # Flatten the history into one prompt string for generate_content().\n",
    "    dialogue_text = \"\\n\".join(f\"{m['role']}: {m['content']}\" for m in messages)\n",
    "    response = gemini.generate_content(dialogue_text)\n",
    "    return response.text"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "e3acf708-f9b1-4a6d-b3e1-823c96d00555",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Smoke test: one Gemini turn from the current history\n",
    "call_gemini()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "c103430e-68c7-4cc6-8a43-6b5aec7fdc96",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Reset the conversation to the three seed messages, then run five full rounds.\n",
    "gpt_messages_spain = [\"Hi there, nothing beats the beauty of Spain and its wonderful beaches.\"]\n",
    "claude_messages_italy = [\"I agree. I admire the Southern Part of Spain but it's not as pretty as the Amalfi Coast.\"]\n",
    "gemini_messages_france = [\"Well, both are good and so is the French Riviera.\"]\n",
    "\n",
    "print(f\"GPT:\\n{gpt_messages_spain[0]}\\n\")\n",
    "print(f\"Claude:\\n{claude_messages_italy[0]}\\n\")\n",
    "print(f\"Gemini:\\n{gemini_messages_france[0]}\\n\")\n",
    "\n",
    "# Each round keeps the speaking order gpt -> claude -> gemini; every reply is\n",
    "# appended to its speaker's list so the next call sees the updated history.\n",
    "for i in range(5):\n",
    "    gpt_next = call_gpt()\n",
    "    print(f\"GPT:\\n{gpt_next}\\n\")\n",
    "    gpt_messages_spain.append(gpt_next)\n",
    "\n",
    "    claude_next = call_claude()\n",
    "    print(f\"Claude:\\n{claude_next}\\n\")\n",
    "    claude_messages_italy.append(claude_next)\n",
    "\n",
    "    gemini_next = call_gemini()\n",
    "    print(f\"Gemini:\\n{gemini_next}\\n\")\n",
    "    gemini_messages_france.append(gemini_next)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.11.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
