<?php

namespace App\Http\Service;

use App\Libs\HttpCurlLibrary;
use Illuminate\Support\Facades\Config;
use Illuminate\Support\Facades\Log;
use App\Models\Forbiddenwords;
use App\Models\Forbiddenwordslog;
use App\Http\Service\BaseService;
// Load the jieba-php core files.
// NOTE(review): these require paths are relative to the current working
// directory, not this file — fragile; consider base_path('vendor/...').
require_once "../vendor/jieba-php/src/vendor/multi-array/MultiArray.php";
require_once "../vendor/jieba-php/src/vendor/multi-array/Factory/MultiArrayFactory.php";
require_once "../vendor/jieba-php/src/class/Jieba.php";
require_once "../vendor/jieba-php/src/class/Finalseg.php";
require_once "../vendor/jieba-php/src/class/JiebaAnalyse.php";
use Fukuball\Jieba\Jieba;
use Fukuball\Jieba\Finalseg;
use Fukuball\Jieba\JiebaAnalyse;

/**
 * Jieba Chinese word-segmentation service.
 *
 * Wraps the fukuball/jieba-php library and carries the forbidden-words
 * models plus the remote moderation endpoint URLs used by sibling services.
 */
class JiebaService extends BaseService
{
    /** @var Forbiddenwords|null declared but not set here — presumably populated by a caller; verify */
    public $forbiddenwords;

    /** @var string image-moderation endpoint URL (from config myurl.imagecheck.url) */
    public $imagecheck;

    /** @var string content-moderation endpoint URL (from config myurl.contentcheck.url) */
    public $contentcheck;

    /** @var Forbiddenwords forbidden-words model (declared explicitly: dynamic properties are deprecated in PHP 8.2) */
    public $model;

    /** @var Forbiddenwordslog forbidden-words log model */
    public $forbiddenwordslog;

    /** @var bool true once the Jieba dictionaries have been loaded in this PHP process */
    protected static $jiebaInitialized = false;

    public function __construct(Forbiddenwords $model, Forbiddenwordslog $forbiddenwordslog)
    {
        parent::__construct();
        $this->imagecheck = config('myurl.imagecheck.url');
        $this->contentcheck = config('myurl.contentcheck.url');
        $this->model = $model;
        $this->forbiddenwordslog = $forbiddenwordslog;
    }

    /**
     * Segment a Chinese text into words, e.g. "我是一个中国人" => ["我", "是", ...].
     *
     * The Jieba "big" dictionary load is expensive, so initialization now
     * happens at most once per PHP process instead of on every call
     * (the original re-initialized the library each time).
     *
     * @param string $content text to segment
     * @return array word tokens as returned by Jieba::cut()
     */
    public function part_word($content)
    {
        if (!self::$jiebaInitialized) {
            // Loading the "big" dictionary needs a generous memory ceiling.
            ini_set('memory_limit', '600M');
            Jieba::init(array('mode' => 'default', 'dict' => 'big'));
            Finalseg::init();
            self::$jiebaInitialized = true;
        }

        return Jieba::cut($content);
    }
}
