{
  "cells": [
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "ymhGfgFSR17k"
      },
      "source": [
        "## **Applio**\n",
        "A simple, high-quality voice conversion tool focused on ease of use and performance.\n",
        "\n",
        "[Support](https://discord.gg/urxFjYmYYh) — [Discord Bot](https://discord.com/oauth2/authorize?client_id=1144714449563955302&permissions=1376674695271&scope=bot%20applications.commands) — [Find Voices](https://applio.org/models) — [GitHub](https://github.com/IAHispano/Applio)\n",
        "\n",
        "<br>\n",
        "\n",
        "### **Credits**\n",
        "- Encryption method: [Hina](https://github.com/hinabl)\n",
        "- Extra section: [Poopmaster](https://github.com/poiqazwsx)\n",
        "- Main development: [Applio Team](https://github.com/IAHispano)"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "cellView": "form",
        "id": "vtON700qokuQ"
      },
      "outputs": [],
      "source": [
        "# @title **Install Applio**\n",
        "import os\n",
        "import codecs\n",
        "import shutil\n",
        "import tarfile\n",
        "import subprocess\n",
        "\n",
        "from pathlib import Path\n",
        "from IPython.display import clear_output\n",
        "\n",
        "# Fixed Caesar-style letter shift used to decode the obfuscated strings\n",
        "# below (e.g. rot_47(\"Vkkgdj\") == \"Applio\"); non-letters pass through.\n",
        "rot_47 = lambda encoded_text: \"\".join(\n",
        "    [\n",
        "        (\n",
        "            chr(\n",
        "                (ord(c) - (ord(\"a\") if c.islower() else ord(\"A\")) - 47) % 26\n",
        "                + (ord(\"a\") if c.islower() else ord(\"A\"))\n",
        "            )\n",
        "            if c.isalpha()\n",
        "            else c\n",
        "        )\n",
        "        for c in encoded_text\n",
        "    ]\n",
        ")\n",
        "\n",
        "# Decoded at runtime: org_name/new_name are folder names, uioawhd is the\n",
        "# git repository URL, and uyadwa is the entry-point script filename\n",
        "# (uyadwa is also used by the \"Start Applio\" cell).\n",
        "org_name = rot_47(\"Vkkgdj\")\n",
        "new_name = rot_47(\"kmjbmvh_hg\")\n",
        "uioawhd = rot_47(codecs.decode(\"pbbxa://oqbpcj.kwu/QIPqaxivw/Ixxtqw.oqb\", \"rot_13\"))\n",
        "uyadwa = codecs.decode(\"ncc.cl\", \"rot_13\")\n",
        "# A: local path where the cached environment archive is stored.\n",
        "A = \"/content/\" + rot_47(\"Kikpm.ovm.bu\")\n",
        "!git clone --depth 1 $uioawhd $new_name --branch 3.2.7 --single-branch\n",
        "%cd $new_name/\n",
        "clear_output()\n",
        "def vidal_setup():\n",
        "    \"\"\"Download (unless already cached) and extract the prebuilt Colab\n",
        "    environment archive into the filesystem root.\"\"\"\n",
        "    A = \"/content/\" + rot_47(\"Kikpm.ovm.bu\")\n",
        "\n",
        "    D = \"/\"  # extraction target: filesystem root\n",
        "    if not os.path.exists(A):\n",
        "        M = os.path.dirname(A)\n",
        "        os.makedirs(M, exist_ok=True)\n",
        "        print(\"No cached install found..\")\n",
        "        try:\n",
        "            # rot13-encoded Hugging Face URL of the environment archive.\n",
        "            N = codecs.decode(\n",
        "                    \"uggcf://uhttvatsnpr.pb/VNUvfcnab/Nccyvb/erfbyir/znva/Raivebzrag/Pbyno/Cache.gne.tm\",\n",
        "                    \"rot_13\",\n",
        "                )\n",
        "            subprocess.run([\"wget\", \"-O\", A, N])\n",
        "            print(\"Download completed successfully!\")\n",
        "        except Exception as H:\n",
        "            print(str(H))\n",
        "            # Drop a partial download so the next run retries cleanly.\n",
        "            if os.path.exists(A):\n",
        "                os.remove(A)\n",
        "    if Path(A).exists():\n",
        "        with tarfile.open(A, \"r:gz\") as I:\n",
        "            I.extractall(D)\n",
        "            print(f\"Extraction of {A} to {D} completed.\")\n",
        "        # Delete the archive after extraction to free disk space.\n",
        "        if os.path.exists(A):\n",
        "            os.remove(A)\n",
        "\n",
        "vidal_setup()\n",
        "# Pin dependency versions known to work with this release (CUDA 12.1 wheels).\n",
        "!pip uninstall torch torchvision torchaudio -y\n",
        "!pip install pydantic==2.8.2 fastapi==0.112.0 starlette==0.37.2\n",
        "!pip install torch==2.3.1 torchvision==0.18.1 torchaudio==2.3.1 --upgrade --index-url https://download.pytorch.org/whl/cu121\n",
        "clear_output()\n",
        "print(\"Finished installing requirements! \")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "cellView": "form",
        "id": "-7cQtXouqpQi"
      },
      "outputs": [],
      "source": [
        "# @title **Start Applio**\n",
        "# @markdown ### Activate this in case the Gradio share link does not work\n",
        "import codecs\n",
        "import threading\n",
        "import urllib.request\n",
        "import time\n",
        "import ipywidgets as widgets\n",
        "from IPython.display import display\n",
        "import os\n",
        "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'  # silence TensorFlow C++ log spam\n",
        "!npm install -g localtunnel &>/dev/null\n",
        "share_tunnel = False  # @param {type:\"boolean\"}\n",
        "def start_applio():\n",
        "    # With the localtunnel fallback enabled, skip Gradio's own --share link;\n",
        "    # otherwise let Gradio create the public share URL itself.\n",
        "    # NOTE: $uyadwa (the app script name) is defined by the \"Install Applio\"\n",
        "    # cell; run that cell first in the same session.\n",
        "    if share_tunnel:\n",
        "        !python $uyadwa --listen\n",
        "    else:\n",
        "        !python $uyadwa --listen --share\n",
        "\n",
        "%load_ext tensorboard\n",
        "%reload_ext tensorboard\n",
        "%tensorboard --logdir logs --bind_all\n",
        "\n",
        "if \"autobackups\" not in globals():\n",
        "    autobackups = False\n",
        "\n",
        "# backup_files is defined by the \"Auto Backup\" extra cell; this branch only\n",
        "# runs if that cell was executed and enabled autobackups beforehand.\n",
        "if autobackups:\n",
        "    thread = threading.Thread(target=backup_files)\n",
        "    thread.start()\n",
        "\n",
        "# Run the app in a background thread so this cell can keep managing the tunnel.\n",
        "thread_applio = threading.Thread(target=start_applio)\n",
        "thread_applio.start()\n",
        "\n",
        "if share_tunnel:\n",
        "    # Wait until the pretrained model file (rot13-decoded relative path)\n",
        "    # exists, i.e. the app finished its first-time setup, before tunneling.\n",
        "    if not os.path.exists(codecs.decode(\"eip/zbqryf/cergenvarqf/cergenvarq_i2/s0T48x.cgu\", \"rot_13\")):\n",
        "        while not os.path.exists(codecs.decode(\"eip/zbqryf/cergenvarqf/cergenvarq_i2/s0T48x.cgu\", \"rot_13\")):\n",
        "            time.sleep(2)\n",
        "        time.sleep(5)\n",
        "    else:\n",
        "        time.sleep(10)\n",
        "    with open('url.txt', 'w') as file:\n",
        "        file.write('')\n",
        "\n",
        "    # Expose the local app port through localtunnel; its URL is appended to url.txt.\n",
        "    get_ipython().system_raw('lt --port 6969 >> url.txt 2>&1 &')\n",
        "\n",
        "    time.sleep(4)\n",
        "\n",
        "    # localtunnel asks visitors for this public IP as the tunnel password.\n",
        "    endpoint_ip = urllib.request.urlopen('https://ipv4.icanhazip.com').read().decode('utf8').strip(\"\\n\")\n",
        "\n",
        "    with open('url.txt', 'r') as file:\n",
        "        tunnel_url = file.read()\n",
        "        tunnel_url = tunnel_url.replace(\"your url is: \", \"\")\n",
        "\n",
        "    print(f\"Share Link: \\033[0m\\033[93m{tunnel_url}\\033[0m\", end=\"\\033[0m\\n\")\n",
        "\n",
        "    password_endpoint_widget = widgets.Text(\n",
        "    value=endpoint_ip,\n",
        "    description='Password IP:',\n",
        "    disabled=True\n",
        "    )\n",
        "    display(password_endpoint_widget)\n",
        "\n",
        "\n",
        "\n",
        "# Keep the cell alive so the background threads are not torn down.\n",
        "while True:\n",
        "    time.sleep(5)\n"
      ]
    },
    {
      "cell_type": "markdown",
      "metadata": {
        "id": "3b59-2x-qEnX"
      },
      "source": [
        "### **Extra**\n",
        "Extra options that make Applio easier to use.\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "cellView": "form",
        "id": "19LNv6iYqF6_"
      },
      "outputs": [],
      "source": [
        "# @title Mount Drive\n",
        "# @markdown Mount the files from Google Drive to the Colab.\n",
        "from google.colab import drive\n",
        "\n",
        "# Mounts Google Drive at /content/drive (prompts for authorization).\n",
        "drive.mount(\"/content/drive\")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "cellView": "form",
        "id": "I5o6MlpFouiG"
      },
      "outputs": [],
      "source": [
        "# @title Auto Backup\n",
        "# @markdown When running it, it will be activated or deactivated previously to start up together with Applio.\n",
        "import os\n",
        "import shutil\n",
        "import time\n",
        "\n",
        "LOGS_FOLDER = \"/content/program_ml/logs/\"\n",
        "GOOGLE_DRIVE_PATH = \"/content/drive/MyDrive/ApplioBackup\"\n",
        "\n",
        "if \"autobackups\" not in globals():\n",
        "    autobackups = False\n",
        "\n",
        "cooldown = 15  # @param {type:\"slider\", min:0, max:100, step:1}\n",
        "def backup_files():\n",
        "    \"\"\"Continuously mirror LOGS_FOLDER into GOOGLE_DRIVE_PATH.\n",
        "\n",
        "    Copies new/updated files, removes backups of files deleted locally, and\n",
        "    records each file's mtime in last_backup_timestamps.txt so unchanged\n",
        "    files are skipped on later passes. Runs forever (background thread).\n",
        "    \"\"\"\n",
        "    print(\"\\nStarting backup loop...\")\n",
        "    last_backup_timestamps_path = os.path.join(\n",
        "        LOGS_FOLDER, \"last_backup_timestamps.txt\"\n",
        "    )\n",
        "    fully_updated = False\n",
        "\n",
        "    while True:\n",
        "        try:\n",
        "            updated_files = 0\n",
        "            deleted_files = 0\n",
        "            new_files = 0\n",
        "            last_backup_timestamps = {}\n",
        "\n",
        "            try:\n",
        "                with open(last_backup_timestamps_path, \"r\") as f:\n",
        "                    for line in f:\n",
        "                        # rsplit keeps file paths containing \":\" intact\n",
        "                        # (fix: plain split(\":\") crashed dict() on them).\n",
        "                        entry = line.strip().rsplit(\":\", 1)\n",
        "                        if len(entry) == 2:\n",
        "                            last_backup_timestamps[entry[0]] = entry[1]\n",
        "            except FileNotFoundError:\n",
        "                pass  # first run: no timestamp index yet\n",
        "\n",
        "            for root, dirs, files in os.walk(LOGS_FOLDER):\n",
        "                # Excluding \"zips\" and \"mute\" directories\n",
        "                if \"zips\" in dirs:\n",
        "                    dirs.remove(\"zips\")\n",
        "                if \"mute\" in dirs:\n",
        "                    dirs.remove(\"mute\")\n",
        "\n",
        "                for filename in files:\n",
        "                    if filename != \"last_backup_timestamps.txt\":\n",
        "                        filepath = os.path.join(root, filename)\n",
        "                        if os.path.isfile(filepath):\n",
        "                            # Mirror the path relative to LOGS_FOLDER into Drive.\n",
        "                            backup_filepath = os.path.join(\n",
        "                                GOOGLE_DRIVE_PATH,\n",
        "                                os.path.relpath(filepath, LOGS_FOLDER),\n",
        "                            )\n",
        "                            os.makedirs(os.path.dirname(backup_filepath), exist_ok=True)\n",
        "                            last_backup_timestamp = last_backup_timestamps.get(filepath)\n",
        "                            current_timestamp = os.path.getmtime(filepath)\n",
        "                            # Copy when never backed up or modified since last pass.\n",
        "                            if (\n",
        "                                last_backup_timestamp is None\n",
        "                                or float(last_backup_timestamp) < current_timestamp\n",
        "                            ):\n",
        "                                shutil.copy2(filepath, backup_filepath)\n",
        "                                last_backup_timestamps[filepath] = str(current_timestamp)\n",
        "                                if last_backup_timestamp is None:\n",
        "                                    new_files += 1\n",
        "                                else:\n",
        "                                    updated_files += 1\n",
        "\n",
        "            # Remove backups of files that no longer exist locally.\n",
        "            for filepath in list(last_backup_timestamps.keys()):\n",
        "                if not os.path.exists(filepath):\n",
        "                    backup_filepath = os.path.join(\n",
        "                        GOOGLE_DRIVE_PATH, os.path.relpath(filepath, LOGS_FOLDER)\n",
        "                    )\n",
        "                    if os.path.exists(backup_filepath):\n",
        "                        os.remove(backup_filepath)\n",
        "                        deleted_files += 1\n",
        "                    del last_backup_timestamps[filepath]\n",
        "\n",
        "            if updated_files > 0 or deleted_files > 0 or new_files > 0:\n",
        "                print(f\"Backup Complete: {new_files} new, {updated_files} updated, {deleted_files} deleted.\")\n",
        "                fully_updated = False\n",
        "            elif not fully_updated:\n",
        "                print(\"Files are up to date.\")\n",
        "                fully_updated = True\n",
        "\n",
        "            # Persist the timestamp index for the next pass / next session.\n",
        "            with open(last_backup_timestamps_path, \"w\") as f:\n",
        "                for filepath, timestamp in last_backup_timestamps.items():\n",
        "                    f.write(f\"{filepath}:{timestamp}\\n\")\n",
        "\n",
        "            time.sleep(cooldown if fully_updated else 0.1)\n",
        "\n",
        "        except Exception as error:\n",
        "            print(f\"An error occurred during backup: {error}\")\n",
        "            time.sleep(5)  # back off instead of spamming errors (e.g. Drive unmounted)\n",
        "\n",
        "# Running this cell toggles the flag read by the \"Start Applio\" cell.\n",
        "if autobackups:\n",
        "    autobackups = False\n",
        "    print(\"Autobackup Disabled\")\n",
        "else:\n",
        "    autobackups = True\n",
        "    print(\"Autobackup Enabled\")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "cellView": "form",
        "id": "-vIzj7pye8Z0"
      },
      "outputs": [],
      "source": [
        "# @title Setup new logs folder format\n",
        "# @markdown Put the exact name you put as your Model Name in Applio.\n",
        "modelname = \"My-Project\"  # @param {type:\"string\"}\n",
        "logs_folder = f\"/content/program_ml/logs/{modelname}/\"\n",
        "\n",
        "import os\n",
        "\n",
        "# Old (legacy) folder name -> new Applio folder name.\n",
        "folder_renames = {\n",
        "    \"0_gt_wavs\": \"sliced_audios\",\n",
        "    \"1_16k_wavs\": \"sliced_audios_16k\",\n",
        "    \"2a_f0\": \"f0\",\n",
        "    \"2b-f0nsf\": \"f0_voiced\",\n",
        "    \"3_feature768\": \"v2_extracted\"\n",
        "}\n",
        "\n",
        "def rename_folders(base_path, rename_dict):\n",
        "    \"\"\"Rename each legacy subfolder of base_path to its new-format name.\n",
        "\n",
        "    Missing folders are reported and skipped rather than treated as errors.\n",
        "    \"\"\"\n",
        "    for legacy_name, current_name in rename_dict.items():\n",
        "        src = os.path.join(base_path, legacy_name)\n",
        "        dst = os.path.join(base_path, current_name)\n",
        "        if not os.path.exists(src):\n",
        "            print(f\"Folder {src} does not exist\")\n",
        "            continue\n",
        "        os.rename(src, dst)\n",
        "        print(f\"Renamed {src} to {dst}\")\n",
        "\n",
        "rename_folders(logs_folder, folder_renames)\n"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "cellView": "form",
        "id": "ifV_vc4h4Uvx"
      },
      "outputs": [],
      "source": [
        "# @title Load a Backup\n",
        "from google.colab import drive\n",
        "import os\n",
        "import shutil  # fix: shutil is used below but was never imported in this cell\n",
        "\n",
        "# @markdown Put the exact name you put as your Model Name in Applio.\n",
        "modelname = \"My-Project\"  # @param {type:\"string\"}\n",
        "source_path = \"/content/drive/MyDrive/ApplioBackup/\" + modelname\n",
        "destination_path = \"/content/program_ml/logs/\" + modelname\n",
        "backup_timestamps_file = \"last_backup_timestamps.txt\"\n",
        "if not os.path.exists(source_path):\n",
        "    print(\n",
        "        \"The model folder does not exist. Please verify the name is correct or check your Google Drive.\"\n",
        "    )\n",
        "else:\n",
        "    # Restore the timestamp index first so Auto Backup does not re-copy everything.\n",
        "    time_ = os.path.join(\"/content/drive/MyDrive/ApplioBackup/\", backup_timestamps_file)\n",
        "    time__ = os.path.join(\"/content/program_ml/logs/\", backup_timestamps_file)\n",
        "    if os.path.exists(time_):\n",
        "        shutil.copy(time_, time__)\n",
        "    # dirs_exist_ok=True fixes the FileExistsError raised when the logs\n",
        "    # folder for this model already exists (e.g. after a partial run).\n",
        "    shutil.copytree(source_path, destination_path, dirs_exist_ok=True)\n",
        "    print(\"Model backup loaded successfully.\")"
      ]
    },
    {
      "cell_type": "code",
      "execution_count": null,
      "metadata": {
        "cellView": "form",
        "id": "leWbhk1X4XoY"
      },
      "outputs": [],
      "source": [
        "# @title Download all custom pretrains\n",
        "import os\n",
        "import urllib.request\n",
        "\n",
        "output_directory = \"/content/program_ml/rvc/models/pretraineds/pretraineds_custom\"\n",
        "# fix: the previous %mkdir magic errored when the folder already existed;\n",
        "# makedirs with exist_ok=True is idempotent.\n",
        "os.makedirs(output_directory, exist_ok=True)\n",
        "\n",
        "pretrained_urls = [\n",
        "    # Ov2 Super\n",
        "    \"https://huggingface.co/ORVC/Ov2Super/resolve/main/f0Ov2Super32kG.pth\",\n",
        "    \"https://huggingface.co/ORVC/Ov2Super/resolve/main/f0Ov2Super32kD.pth\",\n",
        "    \"https://huggingface.co/ORVC/Ov2Super/resolve/main/f0Ov2Super40kG.pth\",\n",
        "    \"https://huggingface.co/ORVC/Ov2Super/resolve/main/f0Ov2Super40kD.pth\",\n",
        "\n",
        "    # TITAN\n",
        "    \"https://huggingface.co/blaise-tk/TITAN/resolve/main/models/medium/40k/pretrained/G-f040k-TITAN-Medium.pth\",\n",
        "    \"https://huggingface.co/blaise-tk/TITAN/resolve/main/models/medium/40k/pretrained/D-f040k-TITAN-Medium.pth\",\n",
        "    \"https://huggingface.co/blaise-tk/TITAN/resolve/main/models/medium/32k/pretrained/G-f032k-TITAN-Medium.pth\",\n",
        "    \"https://huggingface.co/blaise-tk/TITAN/resolve/main/models/medium/32k/pretrained/D-f032k-TITAN-Medium.pth\",\n",
        "\n",
        "    # Snowie V3\n",
        "    \"https://huggingface.co/MUSTAR/SnowieV3.1-32k/resolve/main/D_SnowieV3.1_32k.pth\",\n",
        "    \"https://huggingface.co/MUSTAR/SnowieV3.1-32k/resolve/main/G_SnowieV3.1_32k.pth\",\n",
        "    \"https://huggingface.co/MUSTAR/SnowieV3.1-40k/resolve/main/G_SnowieV3.1_40k.pth\",\n",
        "    \"https://huggingface.co/MUSTAR/SnowieV3.1-40k/resolve/main/D_SnowieV3.1_40k.pth\",\n",
        "    \"https://huggingface.co/MUSTAR/SnowieV3.1-48k/resolve/main/G_SnowieV3.1_48k.pth\",\n",
        "    \"https://huggingface.co/MUSTAR/SnowieV3.1-48k/resolve/main/D_SnowieV3.1_48k.pth\",\n",
        "\n",
        "    # RIN E3\n",
        "    \"https://huggingface.co/MUSTAR/RIN_E3/resolve/main/RIN_E3_G.pth\",\n",
        "    \"https://huggingface.co/MUSTAR/RIN_E3/resolve/main/RIN_E3_D.pth\",\n",
        "\n",
        "    # KLM\n",
        "    \"https://huggingface.co/SeoulStreamingStation/KLM4.1/resolve/main/D_KLM41_32k.pth\",\n",
        "    \"https://huggingface.co/SeoulStreamingStation/KLM4.1/resolve/main/G_KLM41_32k.pth\",\n",
        "    \"https://huggingface.co/SeoulStreamingStation/KLM4.1/resolve/main/D_KLM41_48k.pth\",\n",
        "    \"https://huggingface.co/SeoulStreamingStation/KLM4.1/resolve/main/G_KLM41_48k.pth\",\n",
        "\n",
        "    # SnowieV3 X RIN_E3\n",
        "    \"https://huggingface.co/MUSTAR/SnowieV3.1-X-RinE3-40K/resolve/main/D_Snowie-X-Rin_40k.pth\",\n",
        "    \"https://huggingface.co/MUSTAR/SnowieV3.1-X-RinE3-40K/resolve/main/G_Snowie-X-Rin_40k.pth\",\n",
        "]\n",
        "\n",
        "for url in pretrained_urls:\n",
        "    filename = os.path.join(output_directory, os.path.basename(url))\n",
        "    # Skip files from a previous run so re-running the cell is cheap.\n",
        "    if os.path.exists(filename):\n",
        "        print(f\"Skipping {os.path.basename(url)} (already downloaded)\")\n",
        "        continue\n",
        "    print(f\"Downloading {os.path.basename(url)}...\")\n",
        "    urllib.request.urlretrieve(url, filename)"
      ]
    }
  ],
  "metadata": {
    "accelerator": "GPU",
    "colab": {
      "collapsed_sections": [
        "3b59-2x-qEnX"
      ],
      "gpuType": "T4",
      "provenance": []
    },
    "kernelspec": {
      "display_name": "Python 3",
      "name": "python3"
    },
    "language_info": {
      "name": "python"
    }
  },
  "nbformat": 4,
  "nbformat_minor": 0
}
