{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "59dc4015",
   "metadata": {},
   "source": [
    "# Translation\n",
    "使用 MindSpore NLP 实现基于 `t5-small` 的机器翻译功能。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "385692ff",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[WARNING] DEVICE(45101,e7fff99e4020,python):2025-09-24-14:58:07.509.196 [mindspore/ccsrc/utils/dlopen_macro.h:165] DlsymAscend] Dynamically load symbol aclprofGetSupportedFeaturesV2 failed, result = /usr/local/Ascend/ascend-toolkit/latest/lib64/libmsprofiler.so: undefined symbol: aclprofGetSupportedFeaturesV2\n",
      "[WARNING] DEVICE(45101,e7fff99e4020,python):2025-09-24-14:58:07.509.380 [mindspore/ccsrc/utils/dlopen_macro.h:165] DlsymAscend] Dynamically load symbol aclrtEventGetTimestamp failed, result = /usr/local/Ascend/ascend-toolkit/latest/lib64/libascendcl.so: undefined symbol: aclrtEventGetTimestamp\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/numpy/core/getlimits.py:549: UserWarning: The value of the smallest subnormal for <class 'numpy.float64'> type is zero.\n",
      "  setattr(self, word, getattr(machar, word).flat[0])\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/numpy/core/getlimits.py:89: UserWarning: The value of the smallest subnormal for <class 'numpy.float64'> type is zero.\n",
      "  return self._float_to_str(self.smallest_subnormal)\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/numpy/core/getlimits.py:549: UserWarning: The value of the smallest subnormal for <class 'numpy.float32'> type is zero.\n",
      "  setattr(self, word, getattr(machar, word).flat[0])\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/numpy/core/getlimits.py:89: UserWarning: The value of the smallest subnormal for <class 'numpy.float32'> type is zero.\n",
      "  return self._float_to_str(self.smallest_subnormal)\n",
      "[WARNING] ME(45101:255086590574624,MainProcess):2025-09-24-14:58:13.833.706 [mindspore/context.py:1402] For 'context.set_context', the parameter 'ascend_config' will be deprecated and removed in a future version. Please use the api mindspore.device_context.ascend.op_precision.precision_mode(),\n",
      "                                                       mindspore.device_context.ascend.op_precision.op_precision_mode(),\n",
      "                                                       mindspore.device_context.ascend.op_precision.matmul_allow_hf32(),\n",
      "                                                       mindspore.device_context.ascend.op_precision.conv_allow_hf32(),\n",
      "                                                       mindspore.device_context.ascend.op_tuning.op_compile() instead.\n",
      "Building prefix dict from the default dictionary ...\n",
      "Loading model from cache /tmp/jieba.cache\n",
      "Loading model cost 1.489 seconds.\n",
      "Prefix dict has been built successfully.\n"
     ]
    }
   ],
   "source": [
    "import logging\n",
    "from typing import Optional\n",
    "from mindnlp.transformers import pipeline\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "64fbafd6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# ----------------------\n",
    "# 日志配置\n",
    "# ----------------------\n",
    "logging.basicConfig(\n",
    "    level=logging.INFO,\n",
    "    format=\"%(asctime)s [%(levelname)s] %(message)s\"\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "21e7abbf",
   "metadata": {},
   "outputs": [],
   "source": [
    "class Translator:\n",
    "    \"\"\"Thin wrapper around the MindSpore NLP translation pipeline (Jupyter version).\"\"\"\n",
    "\n",
    "    def __init__(self, model_path: str = \"google-t5/t5-small\"):\n",
    "        \"\"\"\n",
    "        Initialize the translator.\n",
    "        :param model_path: local or remote model path\n",
    "        \"\"\"\n",
    "        self.model_path = model_path\n",
    "        # Load eagerly so construction fails fast if the model is unavailable.\n",
    "        self.pipe = self._load_model()\n",
    "\n",
    "    def _load_model(self):\n",
    "        \"\"\"Build the translation pipeline; log and re-raise on failure.\"\"\"\n",
    "        try:\n",
    "            # Was an f-string with no placeholders (F541); plain literal is equivalent.\n",
    "            logging.info(\"加载翻译模型中...\")\n",
    "            pipe = pipeline(task=\"translation\", model=self.model_path)\n",
    "            logging.info(\"模型加载成功 ✅\")\n",
    "            return pipe\n",
    "        except Exception as e:\n",
    "            logging.error(f\"模型加载失败: {e}\")\n",
    "            raise\n",
    "\n",
    "    def translate(self, text: str, src_lang: str = \"en\", tgt_lang: str = \"de\") -> Optional[str]:\n",
    "        \"\"\"\n",
    "        Translate ``text`` from ``src_lang`` to ``tgt_lang``.\n",
    "        :param text: text to translate\n",
    "        :param src_lang: source language (optional; actual support depends on the model)\n",
    "        :param tgt_lang: target language (optional; actual support depends on the model)\n",
    "        :return: translated text, or None if inference fails\n",
    "        \"\"\"\n",
    "        # T5-style models expect the language pair spelled out in the prompt.\n",
    "        prompt = f\"translate {src_lang} to {tgt_lang}: {text}\"\n",
    "        try:\n",
    "            result = self.pipe(prompt)\n",
    "            # The pipeline normally returns [{'translation_text': 'xxx'}].\n",
    "            translation = result[0].get(\"translation_text\")\n",
    "            logging.info(f\"翻译结果: {translation}\")\n",
    "            return translation\n",
    "        except Exception as e:\n",
    "            # Best-effort API: callers get None instead of an exception.\n",
    "            logging.error(f\"推理失败: {e}\")\n",
    "            return None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fc980851",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2025-09-24 14:58:25,022 [INFO] 加载翻译模型中...\n",
      "T5ForConditionalGeneration has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`.`PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.\n",
      "  - If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).\n",
      "  - If you are not the owner of the model architecture class, please contact the model code owner to update it.\n",
      "[WARNING] DEVICE(45101,e7ff3f39a120,python):2025-09-24-14:58:25.187.899 [mindspore/ccsrc/plugin/res_manager/ascend/mem_manager/ascend_memory_adapter.cc:123] Initialize] Free memory size is less than half of total memory size.Device 0 Device MOC total size:16367894528 Device MOC free size:8066490368 may be other processes occupying this card, check as: ps -ef|grep python\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/mindnlp/transformers/pipelines/__init__.py:1023: UserWarning: \"translation\" task was used, instead of \"translation_XX_to_YY\", defaulting to \"translation_en_to_de\"\n",
      "  warnings.warn(\n",
      "2025-09-24 14:58:55,749 [INFO] 模型加载成功 ✅\n"
     ]
    }
   ],
   "source": [
    "# Instantiate the translator; downloading/loading the t5-small checkpoint can take ~30s.\n",
    "translator = Translator(\"google-t5/t5-small\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "1653854f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Example English sentence used for the demo translation below.\n",
    "sentence = \"I love China, I love Huawei, and Huawei makes my life better.\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "9ad3369f",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "Your input_length: 22 is bigger than 0.9 * max_length: 20. You might consider increasing your max_length manually, e.g. translator('...', max_length=400)\n",
      "2025-09-24 14:59:02,275 [INFO] 翻译结果: de: Ich liebe China, ich liebe Huawei, und Huawei macht mein Leben besser.\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "翻译结果： de: Ich liebe China, ich liebe Huawei, und Huawei macht mein Leben besser.\n"
     ]
    }
   ],
   "source": [
    "# Translate the example sentence English -> German and display the result.\n",
    "result = translator.translate(sentence, src_lang=\"en\", tgt_lang=\"de\")\n",
    "print(\"翻译结果：\", result)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Mindspore",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.20"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
