{
 "cells": [
  {
   "cell_type": "markdown",
   "id": "59dc4015",
   "metadata": {},
   "source": [
    "# Summarization\n",
    "使用 MindSpore NLP 实现基于 `PromptCLUE` 的文本摘要。"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "385692ff",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[WARNING] DEVICE(5716,e7ffc0017020,python):2025-09-24-00:24:38.394.084 [mindspore/ccsrc/utils/dlopen_macro.h:165] DlsymAscend] Dynamically load symbol aclprofGetSupportedFeaturesV2 failed, result = /usr/local/Ascend/ascend-toolkit/latest/lib64/libmsprofiler.so: undefined symbol: aclprofGetSupportedFeaturesV2\n",
      "[WARNING] DEVICE(5716,e7ffc0017020,python):2025-09-24-00:24:38.394.278 [mindspore/ccsrc/utils/dlopen_macro.h:165] DlsymAscend] Dynamically load symbol aclrtEventGetTimestamp failed, result = /usr/local/Ascend/ascend-toolkit/latest/lib64/libascendcl.so: undefined symbol: aclrtEventGetTimestamp\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/numpy/core/getlimits.py:549: UserWarning: The value of the smallest subnormal for <class 'numpy.float64'> type is zero.\n",
      "  setattr(self, word, getattr(machar, word).flat[0])\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/numpy/core/getlimits.py:89: UserWarning: The value of the smallest subnormal for <class 'numpy.float64'> type is zero.\n",
      "  return self._float_to_str(self.smallest_subnormal)\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/numpy/core/getlimits.py:549: UserWarning: The value of the smallest subnormal for <class 'numpy.float32'> type is zero.\n",
      "  setattr(self, word, getattr(machar, word).flat[0])\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/numpy/core/getlimits.py:89: UserWarning: The value of the smallest subnormal for <class 'numpy.float32'> type is zero.\n",
      "  return self._float_to_str(self.smallest_subnormal)\n",
      "[WARNING] ME(5716:255085623996448,MainProcess):2025-09-24-00:24:47.144.267 [mindspore/context.py:1402] For 'context.set_context', the parameter 'ascend_config' will be deprecated and removed in a future version. Please use the api mindspore.device_context.ascend.op_precision.precision_mode(),\n",
      "                                                       mindspore.device_context.ascend.op_precision.op_precision_mode(),\n",
      "                                                       mindspore.device_context.ascend.op_precision.matmul_allow_hf32(),\n",
      "                                                       mindspore.device_context.ascend.op_precision.conv_allow_hf32(),\n",
      "                                                       mindspore.device_context.ascend.op_tuning.op_compile() instead.\n",
      "Building prefix dict from the default dictionary ...\n",
      "Loading model from cache /tmp/jieba.cache\n",
      "Loading model cost 1.533 seconds.\n",
      "Prefix dict has been built successfully.\n"
     ]
    }
   ],
   "source": [
    "import logging\n",
    "from typing import Optional\n",
    "from mindnlp.transformers import pipeline\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "64fbafd6",
   "metadata": {},
   "outputs": [],
   "source": [
    "# ----------------------\n",
    "# Logging configuration\n",
    "# ----------------------\n",
    "# One-time root-logger setup: INFO level with timestamped messages, used by\n",
    "# the Summarizer class below for load/inference progress.\n",
    "# NOTE(review): basicConfig is a no-op if the root logger already has\n",
    "# handlers (e.g. when this cell is re-run without a kernel restart).\n",
    "logging.basicConfig(\n",
    "    level=logging.INFO,\n",
    "    format=\"%(asctime)s [%(levelname)s] %(message)s\"\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "21e7abbf",
   "metadata": {},
   "outputs": [],
   "source": [
    "class Summarizer:\n",
    "    \"\"\"文本摘要封装类 (Jupyter 版)\"\"\"\n",
    "\n",
    "    def __init__(self, model_path: str = \"ClueAI/PromptCLUE\"):\n",
    "        self.model_path = model_path\n",
    "        self.pipe = self._load_model()\n",
    "\n",
    "    def _load_model(self):\n",
    "        try:\n",
    "            logging.info(f\"加载模型中...\")\n",
    "            pipe = pipeline(task=\"summarization\", model=self.model_path)\n",
    "            logging.info(\"模型加载成功 ✅\")\n",
    "            return pipe\n",
    "        except Exception as e:\n",
    "            logging.error(f\"模型加载失败: {e}\")\n",
    "            raise\n",
    "\n",
    "    def summarize(self, text: str) -> Optional[str]:\n",
    "        \"\"\"对输入文本生成摘要\"\"\"\n",
    "        prompt = f\"为下面的文章生成摘要：\\n{text}\"\n",
    "        try:\n",
    "            result = self.pipe(prompt)\n",
    "            summary= result[0]['summary_text']\n",
    "            logging.info(f\"生成摘要: {summary}\")\n",
    "            return summary\n",
    "        except Exception as e:\n",
    "            logging.error(f\"推理失败: {e}\")\n",
    "            return None"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "fc980851",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2025-09-24 00:25:02,906 [INFO] 加载模型中...\n",
      "T5ForConditionalGeneration has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`.`PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.\n",
      "  - If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).\n",
      "  - If you are not the owner of the model architecture class, please contact the model code owner to update it.\n",
      "[WARNING] DEVICE(5716,e7ff0e2be120,python):2025-09-24-00:25:15.144.075 [mindspore/ccsrc/plugin/res_manager/ascend/mem_manager/ascend_memory_adapter.cc:123] Initialize] Free memory size is less than half of total memory size.Device 0 Device MOC total size:16367894528 Device MOC free size:6859898880 may be other processes occupying this card, check as: ps -ef|grep python\n",
      "You are using the default legacy behaviour of the <class 'mindnlp.transformers.models.t5.tokenization_t5.T5Tokenizer'>. This is expected, and simply means that the `legacy` (previous) behavior will be used so nothing changes for you. If you want to use the new behaviour, set `legacy=False`. This should only be set if you understand what it means, and thoroughly read the reason why this was added as explained in https://github.com/huggingface/transformers/pull/24565\n",
      "/usr/local/miniconda3/envs/Mindspore/lib/python3.9/site-packages/mindnlp/transformers/tokenization_utils_base.py:1526: FutureWarning: `clean_up_tokenization_spaces` was not set. It will be set to `True` by default. This behavior will be depracted, and will be then set to `False` by default. \n",
      "  warnings.warn(\n",
      "2025-09-24 00:25:56,681 [INFO] 模型加载成功 ✅\n"
     ]
    }
   ],
   "source": [
    "# Instantiate once; loading took ~54s in the captured run (see log above).\n",
    "summarizer = Summarizer(\"ClueAI/PromptCLUE\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "1653854f",
   "metadata": {},
   "outputs": [],
   "source": [
    "# Sample Chinese news snippet used to exercise the summarizer below.\n",
    "article = \"\"\"北京时间9月5日12时52分，四川甘孜藏族自治州泸定县发生6.8级地震。\n",
    "地震发生后，领导高度重视并作出重要指示，要求把抢救生命作为首要任务，\n",
    "全力救援受灾群众，最大限度减少人员伤亡。\"\"\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "9ad3369f",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "2025-09-24 00:26:19,534 [INFO] 生成摘要: 四川省甘孜藏族自治州泸定县发生6.8级地震\n"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "摘要结果： 四川省甘孜藏族自治州泸定县发生6.8级地震\n"
     ]
    }
   ],
   "source": [
    "summary = summarizer.summarize(article)\n",
    "# summarize() returns None on inference failure, so print handles both cases.\n",
    "print(\"摘要结果：\", summary)"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Mindspore",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.20"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
