{
 "cells": [
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-01-09T19:45:06.362023Z",
     "start_time": "2025-01-09T19:45:05.183920Z"
    }
   },
   "cell_type": "code",
   "source": [
    "from sklearn.base import BaseEstimator, TransformerMixin\n",
    "import pandas as pd\n",
    "import numpy as np\n",
    "from sklearn.feature_extraction.text import TfidfVectorizer\n",
    "from sklearn.feature_extraction.text import  HashingVectorizer\n",
    "from sklearn.feature_extraction.text import  CountVectorizer\n",
    "from sklearn.preprocessing import MultiLabelBinarizer\n",
    "from sklearn.model_selection import train_test_split, cross_val_score\n",
    "from sklearn.pipeline import Pipeline\n",
    "from sklearn.multioutput import MultiOutputClassifier\n",
    "from sklearn.linear_model import LogisticRegression\n",
    "from sklearn.tree import DecisionTreeClassifier\n",
    "from sklearn.metrics import classification_report\n",
    "from collections import Counter\n",
    "from sklearn.neighbors import KNeighborsClassifier\n",
    "from sklearn.ensemble import RandomForestClassifier\n",
    "from sklearn.ensemble import ExtraTreesClassifier\n",
    "from sklearn.naive_bayes import MultinomialNB\n",
    "import jieba\n",
    "import re\n",
    "# 初始化参数\n",
    "df = pd.read_excel('D:\\python\\pytorch-learn\\demo.xlsx',sheet_name='demo',usecols=[0,1])\n",
    "meta = pd.read_excel('D:\\python\\pytorch-learn\\demo.xlsx',sheet_name='meta',usecols=[0,1],dtype=str)\n",
    "\n",
    "# code  = pd.read_excel('D:\\python\\pytorch-learn\\code.xlsx',sheet_name='基础编码',usecols=[0,1,2,3,4])\n",
    "df['code'] = df['code'].apply(lambda x: x.split('-'))\n",
    "all_labels = [item for sublist in df['code'] for item in sublist]\n",
    "label_counts = Counter(all_labels)\n",
    "rare_labels = {label for label, count in label_counts.items() if count == 1}\n",
    "jieba.add_word(\"1.5米\")\n",
    "jieba.add_word(\"1米\")\n",
    "jieba.add_word(\"2米\")\n",
    "jieba.add_word(\"1.8米\")\n",
    "jieba.add_word(\"线\")\n",
    "jieba.add_word(\"100w\")\n",
    "jieba.add_word(\"88w\")\n",
    "jieba.add_word(\"66w\")\n",
    "jieba.add_word(\"67w\")\n",
    "jieba.add_word(\"80w\")\n",
    "jieba.add_word(\"200w\")\n",
    "jieba.add_word(\"60w\")\n",
    "jieba.add_word(\"45w\")\n",
    "jieba.add_word(\"cc\")\n",
    "jieba.add_word(\"120w\")\n",
    "jieba.add_word(\"88w\")\n",
    "jieba.add_word(\"荣max\")\n",
    "jieba.add_word(\"荣\")\n",
    "jieba.add_word(\"max\")\n",
    "jieba.add_word(\"30w\")\n",
    "jieba.add_word(\"65w\")\n",
    "jieba.add_word(\"op\")\n",
    "jieba.add_word(\"oppo\")\n",
    "jieba.add_word(\"mi\")\n",
    "jieba.add_word(\"mini\")\n",
    "jieba.add_word(\"mirco\")\n",
    "jieba.add_word(\"vo\")\n",
    "jieba.add_word(\"v\")\n",
    "jieba.add_word(\"240w\")\n",
    "jieba.add_word(\"pro\")\n",
    "#  '14' '15' '150' '150w' '1516' '16' '18w' '1米' '2' '200w' '20w' '22.5' '240w' '25w' '29w' '29wtypec' '2a' '2米' '3' '30w' '33w' '35w' '4' '40w'\n",
    "jieba.add_word(\"150w\")\n",
    "jieba.add_word(\"22.5w\")\n",
    "jieba.add_word(\"18w\")\n",
    "jieba.add_word(\"30w\")\n",
    "jieba.add_word(\"33w\")\n",
    "jieba.add_word(\"35w\")\n",
    "jieba.add_word(\"40w\")\n",
    "jieba.add_word(\"29w\")\n",
    "jieba.add_word(\"typec\")\n",
    "jieba.add_word(\"tpc\")\n",
    "jieba.add_word(\"大壳头\")\n",
    "jieba.add_word(\"头\")\n",
    "jieba.add_word(\"10a\")\n",
    "jieba.add_word(\"6a\")\n",
    "jieba.add_word(\"5a\")\n",
    "jieba.add_word(\"85w\")\n",
    "jieba.add_word(\"max\")\n",
    "jieba.add_word(\"usb\")\n",
    "jieba.add_word(\"d\")\n",
    "jieba.add_word(\"g\")\n",
    "jieba.add_word(\"plus\")\n",
    "jieba.add_word(\"18w\")\n",
    "jieba.add_word(\"12w\")\n",
    "jieba.add_word(\"55w\")\n",
    "jieba.add_word(\"90w\")\n",
    "jieba.add_word(\"55w\")\n",
    "jieba.add_word(\"44w\")\n",
    "jieba.add_word(\"10w\")\n",
    "jieba.add_word(\"快充头\")\n",
    "jieba.add_word(\"快充\")\n",
    "jieba.add_word(\"闪充头\")\n",
    "jieba.add_word(\"闪充\")\n",
    "jieba.add_word(\"安卓线\")\n",
    "jieba.add_word(\"安卓\")\n",
    "jieba.add_word(\"华\")\n",
    "jieba.add_word(\"双口\")\n",
    "jieba.add_word(\"c口\")\n",
    "jieba.add_word(\"单口\")\n",
    "jieba.add_word(\"华为\",freq=0)\n",
    "jieba.add_word(\"ac\")\n",
    "jieba.add_word(\"vvivo\",freq=0)\n",
    "stop_words=[\"【\", \"】\", \"+\",\"（\",\"）\",\"(\",\")\",\" \",\"*\",\",\",\"/\",\"=\",\"|\"]\n",
    "#stop_words=[]\n",
    "def chinese_tokenizer(context):\n",
    "# 使用正则表达式去除自定义停止词\n",
    "    for stop_word in stop_words:\n",
    "        context = context.replace(stop_word, ' ')\n",
    "    context = context.replace('-', '')\n",
    "    context = context.replace('vivo', 'vvivo')\n",
    "    context = context.lower()\n",
    "    return list(jieba.cut(context))\n",
    "\n",
    "\n",
    "vectorizer = CountVectorizer(\n",
    "    analyzer=chinese_tokenizer,\n",
    "    token_pattern=None,\n",
    "    lowercase=True,\n",
    "    binary=False\n",
    ")\n",
    "\n",
    "md = MultiOutputClassifier(DecisionTreeClassifier(criterion=\"entropy\",random_state=42,max_depth=None,max_features=None))\n",
    "pipeline = Pipeline([\n",
    "    ('tfidf', vectorizer),\n",
    "    ('clf', md)\n",
    "])\n",
    "\n",
    "\n",
    "# 创建一个新的 DataFrame 来保存稀有类别的样本\n",
    "df_rare = df[df['code'].apply(lambda x: any(item in rare_labels for item in x))]\n",
    "\n",
    "# 从原始 DataFrame 中移除这些稀有类别的样本\n",
    "df_common = df.drop(df_rare.index)\n",
    "mlb = MultiLabelBinarizer()\n",
    "meta['code'] = meta['code'].apply(lambda x:[x])\n",
    "mlb.fit(meta['code'])\n",
    "#mlb.fit(df_common['code'])\n",
    "Y_common = mlb.transform(df_common['code'])\n",
    "# 拆分常见类别的数据集\n",
    "X_train_common, X_test_common, Y_train_common, Y_test_common = train_test_split(\n",
    "    df_common['name'], Y_common, test_size=0.01, random_state=45\n",
    ")\n",
    "# 将稀有类别的样本添加到训练集中\n",
    "X_train_rare = df_rare['name']\n",
    "Y_train_rare = mlb.transform(df_rare['code'])\n",
    "# print(X_train_rare.shape, Y_train_rare.shape)\n",
    "# 合并训练集\n",
    "X_train = pd.concat([meta['name'],X_train_common, X_train_rare], ignore_index=True)\n",
    "Y_train = np.vstack([mlb.transform(meta['code']),Y_train_common, Y_train_rare])\n",
    "print(X_train.shape, Y_train.shape)\n",
    "# 测试集保持不变\n",
    "X_test = X_test_common\n",
    "Y_test = Y_test_common\n",
    "#print(mlb.classes_)\n",
    "pipeline.fit(X_train, Y_train)\n",
    "Y_pred = pipeline.predict(X_test)\n",
    "print(pipeline.score(X_train, Y_train))\n",
    "print(pipeline.score(X_test, Y_test))\n",
    "\n",
    "# 打印特征名称（词汇表）\n",
    "print(\"Feature names:\", vectorizer.get_feature_names_out())\n",
    "print(classification_report(Y_test, Y_pred, target_names=mlb.classes_))\n",
    "X_pre = [\"华为全兼容1.5米快充线6A\"]\n",
    "n_pred = pipeline.predict(X_pre)\n",
    "predicted_codes = mlb.inverse_transform(n_pred)\n",
    "# print(cross_val_score(pipeline, X_train, Y_train, cv=5))\n",
    "for name, codes in zip(X_pre, predicted_codes):\n",
    "   print(f\"Product Name: {name} -> Predicted Codes: {', '.join(codes)}\")\n",
    "\n"
   ],
   "id": "7783786bd438721",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "(3050,) (3050, 166)\n",
      "0.9983606557377049\n",
      "0.8333333333333334\n",
      "Feature names: [' ' '1' '1.5' '1.5米' '1.8米' '100' '100w' '10a' '10w' '120' '120w' '12a'\n",
      " '12w' '14' '15' '150' '150w' '16' '18w' '1米' '2' '200w' '20w' '22.5w'\n",
      " '240w' '25w' '29w' '2a' '2米' '3' '30w' '33w' '35w' '4' '40w' '44w' '45'\n",
      " '45w' '4a' '5' '55w' '5a' '5v2a' '5v4a' '60w' '614' '61w' '65w' '66'\n",
      " '66w' '67' '67w' '6a' '7' '70w' '8' '80' '80w' '814' '85w' '87w' '88'\n",
      " '88w' '8a' '9' '90' '90w' '96w' 'a' 'a1181' 'a1278' 'a1286' 'a1343'\n",
      " 'a1369' 'a1370' 'a1398' 'a1424' 'a1425' 'a1465' 'a1466' 'a1502' 'a1534'\n",
      " 'a1706' 'a1707' 'a1708' 'a1932' 'a1990' 'a2141' 'a2166' 'a2363' 'a2518'\n",
      " 'c' 'cc' 'cro' 'cto' 'ctoc' 'c口' 'd' 'e' 'findx6' 'findx8' 'g' 'h' 'hw'\n",
      " 'hw1' 'iapd' 'ic' 'ipad' 'iq' 'iqoo' 'iqv' 'k10' 'k11' 'k11x' 'k20' 'k5'\n",
      " 'k50' 'k60' 'k7' 'k70' 'k80' 'k9' 'l' 'm' 'max' 'mi' 'mini' 'mirco'\n",
      " 'note7' 'oc' 'op' 'oppo' 'pd' 'pd20w' 'plus' 'pro' 'r9' 'reno10' 'reno11'\n",
      " 'reno12' 'reno9' 'ry' 'sam' 't' 'to' 'tpc' 'typec' 'usb' 'v' 'v120' 'v33'\n",
      " 'vi' 'vo' 'vvi' 'x20' 'x70' 'x70t' 'x9' 'y' '一个' '一体' '一加' '一嘉' '一条' '三星'\n",
      " '三条' '不' '不伤机' '专用' '两条' '为' '为主' '主推' '以上' '件' '充' '充头' '充电' '充电器' '充线'\n",
      " '光速' '全' '全系列' '公' '兼容' '冰块' '冲双' '冲头' '准' '加' '加一嘉' '加加' '加长' '加长版'\n",
      " '努比亚' '华' '华双' '单' '单个' '单头' '单条' '单线' '原' '双' '双口' '双头' '双引擎' '发' '发华'\n",
      " '口' '口头' '口米' '口线' '后面' '含线' '型' '型口' '外观' '大壳' '大壳头' '头' '头全' '头大壳' '头米'\n",
      " '头线' '头高功' '套' '套装' '如果' '安卓' '安卓线' '对公' '小' '小壳' '小数点' '小米' '平板' '平果'\n",
      " '弯头' '快充' '快充头' '手机' '折叠' '接口' '推' '推全' '数据线' '新款' '星' '星线' '是' '显示' '普通'\n",
      " '条' '条一加' '条装' '极速' '标准版' '标线' '梯形' '正品' '正常' '氮化' '没有' '爆款' '电' '电线' '白'\n",
      " '白口' '白头' '白色' '直头' '看' '秒' '竞版' '笔记本' '笔记本电脑' '米' '米线' '米金' '系列' '紫口线'\n",
      " '红米' '红线' '红色' '线' '线华为' '线双' '线米' '线紫口' '绿口' '绿口线' '编制' '编织' '老款' '胶囊'\n",
      " '苹果' '荣max' '荣耀' '蓝标' '装' '装配' '角小' '超级' '转' '适用' '适配器' '通用' '邮费' '配双'\n",
      " '金' '金标' '金标头' '镓' '闪充' '闪充头' '高' '高攻' '鲨' '黄' '黄口' '黑' '黑头' '黑色']\n",
      "              precision    recall  f1-score   support\n",
      "\n",
      "  1010010001       0.00      0.00      0.00         0\n",
      "  1010010002       0.00      0.00      0.00         0\n",
      "  1010010003       0.00      0.00      0.00         0\n",
      "  1010020001       0.00      0.00      0.00         0\n",
      "  1010020002       0.00      0.00      0.00         0\n",
      "  1010020003       0.00      0.00      0.00         0\n",
      "  1010150001       0.00      0.00      0.00         0\n",
      "  1010150002       0.00      0.00      0.00         0\n",
      "  1010150003       0.00      0.00      0.00         0\n",
      "  1020010004       1.00      1.00      1.00         1\n",
      "  1020010005       0.00      0.00      0.00         0\n",
      "  1020010006       0.00      0.00      0.00         0\n",
      "  1020020004       0.00      0.00      0.00         0\n",
      "  1020020005       0.00      0.00      0.00         0\n",
      "  1020020006       1.00      1.00      1.00         1\n",
      "  1020150004       0.00      0.00      0.00         0\n",
      "  1020150005       0.00      0.00      0.00         0\n",
      "  1020150006       0.00      0.00      0.00         0\n",
      "  1030010007       1.00      0.50      0.67         2\n",
      "  1030010008       0.00      0.00      0.00         0\n",
      "  1030010009       0.00      0.00      0.00         0\n",
      "  1030010010       0.00      0.00      0.00         0\n",
      "  1030020007       1.00      1.00      1.00         2\n",
      "  1030020008       0.00      0.00      0.00         0\n",
      "  1030020009       0.00      0.00      0.00         0\n",
      "  1030020010       0.00      0.00      0.00         0\n",
      "  1030150007       1.00      1.00      1.00         2\n",
      "  1030150010       0.00      0.00      0.00         0\n",
      "  1040010011       0.00      0.00      0.00         0\n",
      "  1040010012       1.00      1.00      1.00         2\n",
      "  1040010013       1.00      1.00      1.00         1\n",
      "  1040020011       0.00      0.00      0.00         0\n",
      "  1040020012       0.75      0.75      0.75         4\n",
      "  1040020013       1.00      0.50      0.67         2\n",
      "  1040030012       0.00      0.00      0.00         0\n",
      "  1040150011       1.00      1.00      1.00         1\n",
      "  1040150012       1.00      1.00      1.00         2\n",
      "  1040150013       0.00      0.00      0.00         0\n",
      "  1050010014       0.00      0.00      0.00         0\n",
      "  1050010015       0.00      0.00      0.00         0\n",
      "  1050020014       0.00      0.00      0.00         0\n",
      "  1050020015       0.00      0.00      0.00         0\n",
      "  1050150014       0.00      0.00      0.00         0\n",
      "  1050150015       0.00      0.00      0.00         0\n",
      "  1070010017       0.00      0.00      0.00         0\n",
      "  1070010109       0.00      0.00      0.00         0\n",
      "  1070020017       0.00      0.00      0.00         0\n",
      "  1070020109       0.00      0.00      0.00         0\n",
      "  1070150017       0.00      0.00      0.00         0\n",
      "  1070150109       0.00      0.00      0.00         0\n",
      "  1080010018       0.00      0.00      0.00         0\n",
      "  1080010019       0.50      1.00      0.67         1\n",
      "  1080010020       1.00      0.50      0.67         2\n",
      "  1080010021       0.00      0.00      0.00         0\n",
      "  1080020018       0.00      0.00      0.00         0\n",
      "  1080020019       1.00      1.00      1.00         1\n",
      "  1080020020       1.00      1.00      1.00         1\n",
      "  1080020021       0.00      0.00      0.00         1\n",
      "  1080030019       0.00      0.00      0.00         0\n",
      "  1080150018       0.00      0.00      0.00         0\n",
      "  1080150019       0.00      0.00      0.00         0\n",
      "  1080150020       1.00      1.00      1.00         1\n",
      "  1080150021       0.00      0.00      0.00         0\n",
      "  1100010016       0.00      0.00      0.00         0\n",
      "  1100020016       0.00      0.00      0.00         0\n",
      "  1100150016       0.00      0.00      0.00         0\n",
      "  1110010026       1.00      1.00      1.00         1\n",
      "  1110010027       0.00      0.00      0.00         0\n",
      "  1110020026       0.00      0.00      0.00         0\n",
      "  1110020027       0.00      0.00      0.00         0\n",
      "  1110150026       1.00      1.00      1.00         1\n",
      "  1110150027       0.00      0.00      0.00         0\n",
      "  1130010022       0.00      0.00      0.00         0\n",
      "  1130010023       0.00      0.00      0.00         0\n",
      "  1130010024       1.00      1.00      1.00         2\n",
      "  1130010025       0.00      0.00      0.00         0\n",
      "  1130020022       0.00      0.00      0.00         0\n",
      "  1130020023       0.00      0.00      0.00         0\n",
      "  1130020024       1.00      1.00      1.00         1\n",
      "  1130020025       0.00      0.00      0.00         0\n",
      "  1130150022       0.00      0.00      0.00         0\n",
      "  1130150023       0.00      0.00      0.00         0\n",
      "  1130150024       1.00      1.00      1.00         1\n",
      "  1130150025       0.00      0.00      0.00         0\n",
      "  2010650046       0.00      0.00      0.00         0\n",
      "  2012250042       0.00      0.00      0.00         0\n",
      "  2012250043       1.00      1.00      1.00         1\n",
      "  2012250044       0.00      0.00      0.00         0\n",
      "  2012250045       0.00      0.00      0.00         0\n",
      "  2020120107       0.00      0.00      0.00         0\n",
      "  2020300105       0.00      0.00      0.00         0\n",
      "  2020650028       0.00      0.00      0.00         0\n",
      "  2020650029       0.00      0.00      0.00         0\n",
      "  2020650030       0.00      0.00      0.00         0\n",
      "  2020650031       0.00      0.00      0.00         0\n",
      "  2020650032       0.00      0.00      0.00         0\n",
      "  2020650033       0.00      0.00      0.00         0\n",
      "  2020650034       0.00      0.00      0.00         0\n",
      "  2020650035       0.00      0.00      0.00         0\n",
      "  2020650036       0.00      0.00      0.00         0\n",
      "  2020650037       0.00      0.00      0.00         0\n",
      "  2020650038       0.00      0.00      0.00         0\n",
      "  2030180098       0.00      0.00      0.00         0\n",
      "  2030400091       0.00      0.00      0.00         0\n",
      "  2030400099       0.00      0.00      0.00         0\n",
      "  2032250092       1.00      1.00      1.00         1\n",
      "  2032250093       1.00      1.00      1.00         1\n",
      "  2032250094       1.00      1.00      1.00         1\n",
      "  2032250095       0.00      0.00      0.00         0\n",
      "  2032250096       1.00      1.00      1.00         1\n",
      "  2032250097       0.00      0.00      0.00         0\n",
      "  2032250106       0.00      0.00      0.00         0\n",
      "  2040100059       0.00      0.00      0.00         0\n",
      "  2040180058       0.00      0.00      0.00         0\n",
      "  2040200041       0.00      0.00      0.00         0\n",
      "  2040400048       1.00      1.00      1.00         2\n",
      "  2040400050       1.00      1.00      1.00         1\n",
      "  2040400052       0.00      0.00      0.00         0\n",
      "  2040400054       0.00      0.00      0.00         0\n",
      "  2040400055       0.00      0.00      0.00         0\n",
      "  2040400057       0.00      0.00      0.00         0\n",
      "  2040400061       0.00      0.00      0.00         0\n",
      "  2042250039       1.00      1.00      1.00         1\n",
      "  2042250040       1.00      1.00      1.00         1\n",
      "  2042250041       0.00      0.00      0.00         0\n",
      "  2042250047       0.00      0.00      0.00         0\n",
      "  2042250049       0.00      0.00      0.00         0\n",
      "  2042250051       0.00      0.00      0.00         0\n",
      "  2042250053       0.00      0.00      0.00         0\n",
      "  2042250056       0.00      0.00      0.00         0\n",
      "  2042250060       0.00      0.00      0.00         0\n",
      "  2042250090       1.00      1.00      1.00         2\n",
      "  2070100108       0.00      0.00      0.00         0\n",
      "  2080100073       0.00      0.00      0.00         0\n",
      "  2080180068       0.00      0.00      0.00         0\n",
      "  2080180069       0.00      0.00      0.00         0\n",
      "  2080650067       0.00      0.00      0.00         0\n",
      "  2082250063       0.00      0.00      0.00         0\n",
      "  2082250064       1.00      1.00      1.00         3\n",
      "  2082250065       1.00      1.00      1.00         1\n",
      "  2082250066       0.00      0.00      0.00         0\n",
      "  2082250070       0.00      0.00      0.00         0\n",
      "  2082250071       1.00      1.00      1.00         1\n",
      "  2082250072       1.00      1.00      1.00         2\n",
      "  2100650062       0.00      0.00      0.00         0\n",
      "  2100650100       0.00      0.00      0.00         0\n",
      "  2110250076       0.00      0.00      0.00         0\n",
      "  2112250074       0.00      0.00      0.00         0\n",
      "  2112250075       0.00      0.00      0.00         0\n",
      "  2120200078       0.00      0.00      0.00         0\n",
      "  2120300077       0.00      0.00      0.00         0\n",
      "  2130100088       0.00      0.00      0.00         0\n",
      "  2130200085       0.00      0.00      0.00         0\n",
      "  2130650084       0.00      0.00      0.00         0\n",
      "  2130650086       0.00      0.00      0.00         0\n",
      "  2132250079       0.00      0.00      0.00         0\n",
      "  2132250080       1.00      1.00      1.00         1\n",
      "  2132250081       0.00      0.00      0.00         0\n",
      "  2132250082       1.00      1.00      1.00         1\n",
      "  2132250083       0.00      0.00      0.00         0\n",
      "  2132250087       0.00      0.00      0.00         0\n",
      "  2132250089       0.00      0.00      0.00         0\n",
      "  2140300101       0.00      0.00      0.00         0\n",
      "  2140450104       0.00      0.00      0.00         0\n",
      "  2140650103       0.00      0.00      0.00         0\n",
      "  2142250102       0.00      0.00      0.00         0\n",
      "\n",
      "   micro avg       0.96      0.91      0.93        54\n",
      "   macro avg       0.22      0.21      0.21        54\n",
      "weighted avg       0.95      0.91      0.92        54\n",
      " samples avg       0.90      0.88      0.89        54\n",
      "\n",
      "Product Name: 华为全兼容1.5米快充线6A -> Predicted Codes: 1040150012\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "D:\\python\\pytorch-learn\\venv\\Lib\\site-packages\\sklearn\\metrics\\_classification.py:1565: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in labels with no predicted samples. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, f\"{metric.capitalize()} is\", len(result))\n",
      "D:\\python\\pytorch-learn\\venv\\Lib\\site-packages\\sklearn\\metrics\\_classification.py:1565: UndefinedMetricWarning: Recall is ill-defined and being set to 0.0 in labels with no true samples. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, f\"{metric.capitalize()} is\", len(result))\n",
      "D:\\python\\pytorch-learn\\venv\\Lib\\site-packages\\sklearn\\metrics\\_classification.py:1565: UndefinedMetricWarning: F-score is ill-defined and being set to 0.0 in labels with no true nor predicted samples. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, f\"{metric.capitalize()} is\", len(result))\n",
      "D:\\python\\pytorch-learn\\venv\\Lib\\site-packages\\sklearn\\metrics\\_classification.py:1565: UndefinedMetricWarning: Precision is ill-defined and being set to 0.0 in samples with no predicted labels. Use `zero_division` parameter to control this behavior.\n",
      "  _warn_prf(average, modifier, f\"{metric.capitalize()} is\", len(result))\n"
     ]
    }
   ],
   "execution_count": 185
  },
  {
   "metadata": {},
   "cell_type": "code",
   "outputs": [],
   "execution_count": null,
   "source": "df",
   "id": "bd93435717744bd3"
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-01-09T19:43:55.409976Z",
     "start_time": "2025-01-09T19:43:55.403958Z"
    }
   },
   "cell_type": "code",
   "source": [
     "import jieba\n",
     "# Scratch cell: inspect how jieba segments a sample product name with\n",
     "# cut_all=True (all possible words) after re-registering '华为' with freq=1.\n",
     "jieba.add_word(\"华为\",freq=1)\n",
     "list(jieba.cut(\"华为【66W快充头+2米线】\",HMM=True,cut_all=True))"
   ],
   "id": "b57eb376934b75a0",
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['华为', '【', '66W', '快充', '快充头', '+', '2米', '米线', '】']"
      ]
     },
     "execution_count": 183,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "execution_count": 183
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-01-07T13:08:11.805555Z",
     "start_time": "2025-01-07T13:08:11.791280Z"
    }
   },
   "cell_type": "code",
   "source": [
    "from sklearn.feature_extraction.text import TfidfVectorizer\n",
    "\n",
    "# 示例文本数据\n",
    "corpus = [\n",
    "    'This is the first document.',\n",
    "    'This document is the second document.',\n",
    "    'And this is the third one.',\n",
    "    'Is this the first document?',\n",
    "]\n",
    "# 自定义分词器函数\n",
    "def weighted_tokenizer(text):\n",
    "    # 这里简单地按空格分割，实际中应使用适当的分词工具如 jieba\n",
    "    tokens = text.split()\n",
    "    print(tokens)\n",
    "\n",
    "    \n",
    "    return tokens\n",
    "# 初始化 TfidfVectorizer\n",
    "vectorizer = TfidfVectorizer(\n",
    "   # tokenizer=weighted_tokenizer,\n",
    ")\n",
    "\n",
    "# 拟合并转换文本数据\n",
    "X = vectorizer.fit_transform(corpus)\n",
    "\n",
    "# 打印特征名称（词汇表）\n",
    "print(\"Feature names:\", vectorizer.get_feature_names_out())\n",
    "\n",
    "# 打印 TF-IDF 矩阵\n",
    "print(\"TF-IDF Matrix:\\n\", X.toarray())"
   ],
   "id": "2bda31feefc3c3ca",
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Feature names: ['and' 'document' 'first' 'is' 'one' 'second' 'the' 'third' 'this']\n",
      "TF-IDF Matrix:\n",
      " [[0.         0.46979139 0.58028582 0.38408524 0.         0.\n",
      "  0.38408524 0.         0.38408524]\n",
      " [0.         0.6876236  0.         0.28108867 0.         0.53864762\n",
      "  0.28108867 0.         0.28108867]\n",
      " [0.51184851 0.         0.         0.26710379 0.51184851 0.\n",
      "  0.26710379 0.51184851 0.26710379]\n",
      " [0.         0.46979139 0.58028582 0.38408524 0.         0.\n",
      "  0.38408524 0.         0.38408524]]\n"
     ]
    }
   ],
   "execution_count": 132
  },
  {
   "metadata": {
    "ExecuteTime": {
     "end_time": "2025-01-07T13:14:41.362460Z",
     "start_time": "2025-01-07T13:14:41.345134Z"
    }
   },
   "cell_type": "code",
   "source": "",
   "id": "522477ad846cf71f",
   "outputs": [],
   "execution_count": null
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.6"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
