from fastapi import FastAPI, params
from pydantic import BaseModel
from wss.stopWords import stopWordsTextFile
from wss.jieba import ModeSelectionSetting, seg_depart_jieba,seg_depart_Count_jieba
from wss.pkuseg import seg_depart_pkuseg, seg_depart_Count_pkuseg

app = FastAPI(title='中文分词服务',description='中文分词服务描述')

class Item(BaseModel):  # request-body model shared by all segmentation endpoints
    """Request payload for the word-segmentation endpoints.

    NOTE(review): field names mix camelCase and snake_case, but they are part
    of the public JSON contract — renaming them would break existing clients.
    """
    # Text to be segmented.
    text: str
    # When True, stop words are removed from the segmentation result.
    is_DelStopWords: bool = False
    # Stop-word list selector forwarded to stopWordsTextFile(); "baidu" by
    # default — presumably names a bundled stop-word file. TODO confirm valid values.
    stopWords: str = "baidu"
    # Segmentation-mode selector forwarded to ModeSelectionSetting(); only the
    # jieba routes apply it. TODO confirm the set of accepted mode codes.
    ModeSelection: str = "jzms"

@app.get("/")
async def root():
    """Landing endpoint: returns a static welcome message."""
    welcome = "Welcome to the Chinese word segmentation service"
    return {"message": welcome}

@app.post("/jieba",summary='jieba分词',description='使用结巴进行分词，可去除停用词。',tags=['jieba'])
async def jieba(item : Item):
    """Segment ``item.text`` with jieba, optionally removing stop words.

    Applies the request's stop-word list and segmentation mode before
    segmenting; returns ``{"words": <segmentation result>}``.
    """
    # Configure shared segmentation state from the request first.
    stopWordsTextFile(item.stopWords)
    ModeSelectionSetting(item.ModeSelection)
    return {"words": seg_depart_jieba(item.text, item.is_DelStopWords)}

@app.post("/jieba/count", summary='jieba词频统计', description='使用结巴进行分词，去除停用词，进行词频统计。',tags=['jieba'])
async def jiebaCount(item : Item):
    """Segment ``item.text`` with jieba and return word-frequency counts.

    Applies the request's stop-word list and segmentation mode before
    counting; returns ``{"words": <frequency result>}``.
    """
    # Same per-request configuration as the /jieba route.
    stopWordsTextFile(item.stopWords)
    ModeSelectionSetting(item.ModeSelection)
    return {"words": seg_depart_Count_jieba(item.text, item.is_DelStopWords)}


@app.post("/pkuseg",summary='pkuseg分词',description='使用pkuseg进行分词，可去除停用词。',tags=['pkuseg'])
async def pkuseg(item: Item):
    """Segment ``item.text`` with pkuseg, optionally removing stop words.

    NOTE(review): unlike the jieba routes, ``item.stopWords`` and
    ``item.ModeSelection`` are ignored here — presumably pkuseg uses its own
    defaults; confirm this is intentional.
    """
    result = seg_depart_pkuseg(item.text, item.is_DelStopWords)
    return {"words": result}

@app.post("/pkuseg/count", summary='pkuseg词频统计', description='使用pkuseg进行分词，去除停用词，进行词频统计。',tags=['pkuseg'])
async def pkusegCount(item: Item):
    """Segment ``item.text`` with pkuseg and return word-frequency counts.

    Fix: the OpenAPI ``description`` previously said "使用结巴进行分词"
    (copy-pasted from the jieba route) although this route uses pkuseg;
    it now correctly reads "使用pkuseg进行分词".

    NOTE(review): like /pkuseg, this route ignores ``item.stopWords`` and
    ``item.ModeSelection`` — confirm this is intentional.
    """
    words = seg_depart_Count_pkuseg(item.text, item.is_DelStopWords)
    return {"words": words}
