{ "cells": [ { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [], "source": [ "from tensorflow.keras.preprocessing.text import Tokenizer\n", "from tensorflow.keras.preprocessing.sequence import pad_sequences\n", "from tensorflow import keras\n", "from pathlib import Path as pp\n", "\n", "import os\n", "notebook_path = os.getcwd()" ] }, { "cell_type": "code", "execution_count": 19, "metadata": {}, "outputs": [ { "ename": "ValueError", "evalue": "File not found: filepath=../imsoumyaneel-sentiment_analysis_llama2.keras. Please ensure the file is an accessible `.keras` zip file.", "output_type": "error", "traceback": [ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", "Cell \u001b[0;32mIn[19], line 15\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m padded_text\n\u001b[1;32m 14\u001b[0m \u001b[38;5;66;03m# load model\u001b[39;00m\n\u001b[0;32m---> 15\u001b[0m model \u001b[38;5;241m=\u001b[39m \u001b[43mkeras\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmodels\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mload_model\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43m../imsoumyaneel-sentiment_analysis_llama2.keras\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m \u001b[38;5;66;03m# Replace \"your_model.h5\" with the path to your trained model\u001b[39;00m\n\u001b[1;32m 17\u001b[0m \u001b[38;5;66;03m# Preprocess the custom input text\u001b[39;00m\n\u001b[1;32m 18\u001b[0m preprocessed_text \u001b[38;5;241m=\u001b[39m preprocess_text(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mi love this thing!\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n", "File \u001b[0;32m~/Documents/models/twitter_model/.venv/lib/python3.10/site-packages/keras/src/saving/saving_api.py:185\u001b[0m, in \u001b[0;36mload_model\u001b[0;34m(filepath, custom_objects, compile, safe_mode)\u001b[0m\n\u001b[1;32m 183\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m legacy_h5_format\u001b[38;5;241m.\u001b[39mload_model_from_hdf5(filepath)\n\u001b[1;32m 184\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;28mstr\u001b[39m(filepath)\u001b[38;5;241m.\u001b[39mendswith(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m.keras\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[0;32m--> 185\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 186\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFile not found: filepath=\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfilepath\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m. \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 187\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mPlease ensure the file is an accessible `.keras` \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 188\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mzip file.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 189\u001b[0m )\n\u001b[1;32m 190\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 191\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\n\u001b[1;32m 192\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mFile format not supported: filepath=\u001b[39m\u001b[38;5;132;01m{\u001b[39;00mfilepath\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m. 
\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 193\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mKeras 3 only supports V3 `.keras` files and \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 202\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mmight have a different name).\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 203\u001b[0m )\n", "\u001b[0;31mValueError\u001b[0m: File not found: filepath=../imsoumyaneel-sentiment_analysis_llama2.keras. Please ensure the file is an accessible `.keras` zip file." ] } ], "source": [ "# NOTE: this Tokenizer is unfitted; for meaningful predictions it must carry the\n", "# word index built during training (re-fit it on the training texts or load a saved tokenizer)\n", "tokenizer = Tokenizer()\n", "max_length = 200  # same sequence length the training data was padded to\n", "models_dir = pp(pp(notebook_path).parent, 'models')  # expected location of saved models (unused by the hard-coded path below)\n", "\n", "\n", "# preprocess input text the same way as the training data\n", "def preprocess_text(text):\n", "    # Tokenize the text\n", "    tokenized_text = tokenizer.texts_to_sequences([text])\n", "    # Pad sequences to the same length as the training data\n", "    padded_text = pad_sequences(tokenized_text, maxlen=max_length, padding='post')\n", "    return padded_text\n", "\n", "# Load the trained model (adjust the path to wherever the .keras file is stored)\n", "model = keras.models.load_model('../imsoumyaneel-sentiment_analysis_llama2.keras')\n", "\n", "# Preprocess the custom input text\n", "preprocessed_text = preprocess_text(\"i love this thing!\")\n", "\n", "# Make predictions\n", "predictions = model.predict(preprocessed_text)\n", "\n", "# Prediction for the single input sample\n", "predictions[0][0]" ] } ], "metadata": { "kernelspec": { "display_name": ".venv", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.10.12" } }, "nbformat": 4, "nbformat_minor": 2 }