{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "afd55886-5f5b-4794-838e-ef8179fb0394",
   "metadata": {},
   "source": [
    "##### **** This notebook applies the doc_id transform to jsonl files and generates the corresponding parquet files. \n",
    "##### **** In addition, it converts the parquet files back to jsonl files\n",
    "\n",
    "##### **** These pip installs need to be adapted to use the appropriate release level. Alternatively, the venv running the jupyter lab could be pre-configured with a requirements file that includes the right release. Example for transform developers working from a git clone:\n",
    "```\n",
    "make venv \n",
    "source venv/bin/activate \n",
    "pip install jupyterlab\n",
    "```"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "4c45c3c6-e4d7-4e61-8de6-32d61f2ce695",
   "metadata": {},
   "outputs": [],
   "source": [
    "## This is here as a reference only.\n",
    "# Users and application developers must use the right release tag from pypi.\n",
    "# Prefer %pip in its own top cell (it targets the kernel's environment), pinned:\n",
    "# %pip install data-prep-kit-transforms[doc_id]==<release>"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "ebf1f782-0e61-485c-8670-81066beb734c",
   "metadata": {},
   "source": [
    "##### ***** Import required classes and modules"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "9669273a-8fcc-4b40-9b20-8df658e2ab58",
   "metadata": {},
   "outputs": [],
   "source": [
    "from dpk_doc_id import DocID\n",
    "from dotenv import load_dotenv\n",
    "import os\n",
    "# Load environment variables from a .env file if present\n",
    "load_dotenv()\n",
    "\n",
    "# Required settings -- os.environ[...] raises KeyError naming the missing\n",
    "# variable, so a misconfigured environment fails fast before any work is done.\n",
    "INPUT_FOLDER=os.environ['INPUT_FOLDER']\n",
    "OUTPUT_FOLDER=os.environ['OUTPUT_FOLDER']\n",
    "OUTPUT_JSON=os.environ['OUTPUT_JSON']\n",
    "# display() shows each argument on its own line in the cell output\n",
    "display(f\"Input folder: {INPUT_FOLDER}\", f\"Output folder: {OUTPUT_FOLDER}\", f\"Output JSON: {OUTPUT_JSON}\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "7234563c-2924-4150-8a31-4aec98c1bf33",
   "metadata": {},
   "source": [
    "##### ***** Setup runtime parameters for this transform\n",
    "* doc_column - the name of the column containing the document (required for ID generation)\n",
    "* hash_column - the name of the column created to hold the string document id; if None, the id is not generated\n",
    "##### ***** Use the python runtime to invoke the transform"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "badafb96-64d2-4bb8-9f3e-b23713fd5c3f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Run the doc_id transform: reads the .jsonl files under INPUT_FOLDER and\n",
    "# writes parquet files under OUTPUT_FOLDER, adding a hash id per document.\n",
    "DocID(input_folder= INPUT_FOLDER,\n",
    "        output_folder=OUTPUT_FOLDER,\n",
    "        doc_id_doc_column=\"messages\",\n",
    "        doc_id_hash_column=\"uuid\",\n",
    "        # NOTE(review): string-encoded list appears to be what the DPK runtime\n",
    "        # parses for this option -- confirm against the doc_id transform docs.\n",
    "        data_files_to_use=\"['.jsonl']\"\n",
    "        ).transform()"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "c3df5adf-4717-4a03-864d-9151cd3f134b",
   "metadata": {},
   "source": [
    "##### **** The specified folder will include the transformed parquet files."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "0568f860",
   "metadata": {},
   "outputs": [],
   "source": [
    "# NOTE(review): disabled leftover debug cell. `x` is never defined anywhere in\n",
    "# this notebook, so `if x:` raised NameError on Restart & Run All; the printed\n",
    "# message \"No X\" also contradicted the truthy condition. Original code:\n",
    "#   if x:\n",
    "#       print (\"No X\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "7276fe84-6512-4605-ab65-747351e13a7c",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Count the parquet files produced by the transform (recursive search).\n",
    "from glob import glob\n",
    "parquet_outputs = glob(f\"{OUTPUT_FOLDER}/**/*.parquet\", recursive=True)\n",
    "len(parquet_outputs)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "845a75cf-f4a9-467d-87fa-ccbac1c9beb8",
   "metadata": {},
   "outputs": [],
   "source": [
    "%%time\n",
    "def write_parquet_to_jsonl(parquet_file_path, output_jsonl_path):\n",
    "    \"\"\"Convert one parquet file to newline-delimited JSON (jsonl).\n",
    "\n",
    "    Prints which step (read or write) failed, then re-raises the error.\n",
    "    \"\"\"\n",
    "    import polars as pl\n",
    "    try:\n",
    "        df = pl.read_parquet(parquet_file_path)\n",
    "    except Exception as e:\n",
    "        print (f\"Error reading {parquet_file_path}: {type(e)}-{str(e)}\")\n",
    "        raise\n",
    "    try:\n",
    "        df.write_ndjson(output_jsonl_path)\n",
    "    except Exception as e:\n",
    "        print (f\"Error writing {output_jsonl_path}: {type(e)}-{str(e)}\")\n",
    "        raise\n",
    "\n",
    "\n",
    "def write_parquet_folder_to_jsonl(parquet_folder, jsonl_folder):\n",
    "    \"\"\"Mirror every parquet file under parquet_folder as .jsonl under jsonl_folder.\"\"\"\n",
    "    import os\n",
    "    import glob\n",
    "    parquet_files = glob.glob(f\"{parquet_folder}/**/*.parquet\", recursive=True)\n",
    "    for parquet_file in parquet_files:\n",
    "        # Same relative path and basename, with the extension swapped to .jsonl\n",
    "        json_file = f\"{os.path.splitext(parquet_file)[0]}.jsonl\"\n",
    "        json_file = json_file.replace(parquet_folder, jsonl_folder)\n",
    "        os.makedirs(os.path.dirname(json_file), exist_ok=True)\n",
    "        print (f\"Processing: {parquet_file} -> {json_file}\")\n",
    "        try:\n",
    "            write_parquet_to_jsonl(parquet_file, json_file)\n",
    "        except Exception as e:\n",
    "            print (f\"Error processing {parquet_file}: {type(e)}-{str(e)}\")\n",
    "            # Stop at the first failure rather than producing a silently\n",
    "            # incomplete output tree; the messages above identify the bad file.\n",
    "            return\n",
    "\n",
    "write_parquet_folder_to_jsonl(OUTPUT_FOLDER, OUTPUT_JSON)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "996b7876",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Count the jsonl files written by the conversion step above.\n",
    "from glob import glob\n",
    "json_files = glob(f\"{OUTPUT_JSON}/**/*.jsonl\", recursive=True)\n",
    "len(json_files)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "py12-dpk",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.12.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
