{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import torch\n",
"torch._dynamo.config.cache_size_limit = 64\n",
"torch._dynamo.config.suppress_errors = True\n",
"torch.set_float32_matmul_precision('high')\n",
"\n",
"import ChatTTS\n",
"from IPython.display import Audio"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Load Models"
]
},
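{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optionally, confirm that a CUDA device is visible before loading the models. This is a plain PyTorch check, not part of the ChatTTS API."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Optional environment check (plain PyTorch, independent of ChatTTS).\n",
"print('CUDA available:', torch.cuda.is_available())\n",
"if torch.cuda.is_available():\n",
"    print('Device:', torch.cuda.get_device_name(0))"
]
},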
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"chat = ChatTTS.Chat()\n",
"chat.load_models()\n",
"\n",
"# Use force_redownload=True if the weights updated.\n",
"# chat.load_models(force_redownload=True)\n",
"\n",
"# If you download the weights manually, set source='locals'.\n",
"# chat.load_models(source='local', local_path='YOUR LOCAL PATH')"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Inference"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Batch infer"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:ChatTTS.core:All initialized.\n",
" 28%|██▊ | 106/384 [00:00<00:01, 140.38it/s]\n",
" 47%|████▋ | 960/2048 [00:07<00:08, 133.25it/s]\n"
]
}
],
"source": [
"texts = [\"So we found being competitive and collaborative was a huge way of staying motivated towards our goals, so one person to call when you fall off, one person who gets you back on then one person to actually do the activity with.\",]*3 \\\n",
" + [\"我觉得像我们这些写程序的人,他,我觉得多多少少可能会对开源有一种情怀在吧我觉得开源是一个很好的形式。现在其实最先进的技术掌握在一些公司的手里的话,就他们并不会轻易的开放给所有的人用。\"]*3 \n",
" \n",
"wavs = chat.infer(texts)"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"\n",
" \n",
" "
],
"text/plain": [
""
]
},
"execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"Audio(wavs[0], rate=24_000, autoplay=True)"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"\n",
" \n",
" "
],
"text/plain": [
""
]
},
"execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"Audio(wavs[3], rate=24_000, autoplay=True)"
]
},
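{
"cell_type": "markdown",
"metadata": {},
"source": [
"Each entry of `wavs` is a waveform sampled at 24 kHz. As a minimal sketch (assuming the `soundfile` package is installed; it is not imported above), the whole batch can be written to WAV files:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Minimal sketch: write each generated waveform to disk (assumes `soundfile` is installed).\n",
"import numpy as np\n",
"import soundfile as sf\n",
"\n",
"for i, w in enumerate(wavs):\n",
"    data = np.squeeze(np.asarray(w))  # flatten a (1, N) array to (N,)\n",
"    sf.write(f'batch_output_{i}.wav', data, 24_000)"
]
},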
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Custom params"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:ChatTTS.core:All initialized.\n",
" 14%|█▍ | 53/384 [00:00<00:02, 146.65it/s]\n",
" 22%|██▏ | 452/2048 [00:03<00:11, 140.51it/s]\n"
]
}
],
"source": [
"params_infer_code = {'prompt':'[speed_5]', 'temperature':.3}\n",
"params_refine_text = {'prompt':'[oral_2][laugh_0][break_6]'}\n",
"\n",
"wav = chat.infer('四川美食可多了,有麻辣火锅、宫保鸡丁、麻婆豆腐、担担面、回锅肉、夫妻肺片等,每样都让人垂涎三尺。', \\\n",
" params_refine_text=params_refine_text, params_infer_code=params_infer_code)"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"\n",
" \n",
" "
],
"text/plain": [
""
]
},
"execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"Audio(wav[0], rate=24_000, autoplay=True)"
]
},
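{
"cell_type": "markdown",
"metadata": {},
"source": [
"The same sentence can be re-rendered with different control-token values. The indices below (a slower `[speed_3]`, a shorter `[break_4]`) are illustrative picks from the same token families used above, not values prescribed by ChatTTS, and distinct variable names are used so the cells that follow are unaffected."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Same API as above, with illustrative alternative token values.\n",
"params_infer_code_alt = {'prompt': '[speed_3]', 'temperature': .3}\n",
"params_refine_text_alt = {'prompt': '[oral_2][laugh_0][break_4]'}\n",
"\n",
"wav_alt = chat.infer('四川美食可多了,有麻辣火锅、宫保鸡丁、麻婆豆腐、担担面、回锅肉、夫妻肺片等,每样都让人垂涎三尺。', \\\n",
"                     params_refine_text=params_refine_text_alt, params_infer_code=params_infer_code_alt)\n",
"Audio(wav_alt[0], rate=24_000, autoplay=True)"
]
},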
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### fix random speaker"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"rand_spk = chat.sample_random_speaker()\n",
"params_infer_code = {'spk_emb' : rand_spk, }\n",
"\n",
"wav = chat.infer('四川美食确实以辣闻名,但也有不辣的选择。比如甜水面、赖汤圆、蛋烘糕、叶儿粑等,这些小吃口味温和,甜而不腻,也很受欢迎。', \\\n",
" params_refine_text=params_refine_text, params_infer_code=params_infer_code)"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [
{
"data": {
"text/html": [
"\n",
" \n",
" "
],
"text/plain": [
""
]
},
"execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"Audio(wav[0], rate=24_000, autoplay=True)"
]
},
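{
"cell_type": "markdown",
"metadata": {},
"source": [
"To keep the same voice across sessions, the sampled speaker embedding can be persisted with plain `torch.save`/`torch.load`; this is a generic PyTorch sketch (the file name is arbitrary), not a ChatTTS-specific API."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Persist the sampled speaker so the same voice can be restored later.\n",
"torch.save(rand_spk, 'fixed_speaker.pt')\n",
"\n",
"# In a later session, reload it and pass it as spk_emb again.\n",
"rand_spk = torch.load('fixed_speaker.pt')\n",
"params_infer_code = {'spk_emb': rand_spk}"
]
},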
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Two stage control"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:ChatTTS.core:All initialized.\n",
" 23%|██▎ | 87/384 [00:00<00:01, 150.60it/s]\n"
]
},
{
"data": {
"text/plain": [
"['so we found being competitive and collaborative [uv_break] was a huge way of staying [uv_break] motivated towards our goals, [uv_break] so [uv_break] one person to call [uv_break] when you fall off, [uv_break] one person who [uv_break] gets you back [uv_break] on then [uv_break] one person [uv_break] to actually do the activity with.']"
]
},
"execution_count": null,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"text = \"So we found being competitive and collaborative was a huge way of staying motivated towards our goals, so one person to call when you fall off, one person who gets you back on then one person to actually do the activity with.\"\n",
"chat.infer(text, refine_text_only=True)"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:ChatTTS.core:All initialized.\n",
" 49%|████▊ | 995/2048 [00:07<00:07, 141.85it/s]\n"
]
}
],
"source": [
"text = 'so we found being competitive and collaborative [uv_break] was a huge way of staying [uv_break] motivated towards our goals, [uv_break] so [uv_break] one person to call [uv_break] when you fall off, [uv_break] one person who [uv_break] gets you back [uv_break] on then [uv_break] one person [uv_break] to actually do the activity with.'\n",
"wav = chat.infer(text, skip_refine_text=True)"
]
},
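{
"cell_type": "markdown",
"metadata": {},
"source": [
"Playback of the two-stage result follows the same pattern as the earlier cells:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"Audio(wav[0], rate=24_000, autoplay=True)"
]
},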
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## LLM Call"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"from ChatTTS.experimental.llm import llm_api\n",
"\n",
"API_KEY = ''\n",
"client = llm_api(api_key=API_KEY,\n",
" base_url=\"https://api.deepseek.com\",\n",
" model=\"deepseek-chat\")"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:httpx:HTTP Request: POST https://api.deepseek.com/chat/completions \"HTTP/1.1 200 OK\"\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"四川美食可多了, 有麻辣火锅、宫保鸡丁、麻婆豆腐、担担面、回锅肉、夫妻肺片、串串香、龙抄手、宜宾燃面、乐山钵钵鸡等, 每样都让人垂涎三尺。\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:httpx:HTTP Request: POST https://api.deepseek.com/chat/completions \"HTTP/1.1 200 OK\"\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"四川美食可多了,有麻辣火锅、宫保鸡丁、麻婆豆腐、担担面、回锅肉、夫妻肺片、串串香、龙抄手、宜宾燃面、乐山钵钵鸡等,每样都让人垂涎三尺。\n"
]
}
],
"source": [
"user_question = '四川有哪些好吃的美食呢?'\n",
"text = client.call(user_question, prompt_version = 'deepseek')\n",
"print(text)\n",
"text = client.call(text, prompt_version = 'deepseek_TN')\n",
"print(text)"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"INFO:ChatTTS.core:All initialized.\n",
" 20%|█▉ | 75/384 [00:00<00:02, 144.93it/s]\n",
" 32%|███▏ | 647/2048 [00:04<00:09, 140.27it/s]\n"
]
}
],
"source": [
"params_infer_code = {'spk_emb' : rand_spk, 'temperature':.3}\n",
"\n",
"wav = chat.infer(text, params_infer_code=params_infer_code)"
]
},
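{
"cell_type": "markdown",
"metadata": {},
"source": [
"As in the earlier sections, the synthesized answer can be played back directly:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"Audio(wav[0], rate=24_000, autoplay=True)"
]
},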
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3 (ipykernel)",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.8"
}
},
"nbformat": 4,
"nbformat_minor": 4
}