# -*- coding: UTF-8 -*-
import jieba
from dao.mysql import Mysql
from bs4 import BeautifulSoup
import Levenshtein
from tool.StringTools import StringTools


# Maps a DB column of the `bidding` table -> the Chinese keyword that labels
# that field inside the scraped announcement text (number, purchasing unit,
# address, deadline, agency name, publish date, product).
filters = {'number': '编号', 'account': '采购单位', 'province': '地址', 'due_date': '截止时间', 'agency': '代理机构名称',
           'release_date': '公告时间', 'product': '产品'}
# Fetch the province/city standardization mapping (raw text -> canonical name).
SQL_queryP = "SELECT origin,standard FROM `crawler`.`standardization` WHERE scope = '省市'"
# NOTE(review): `mysql` is not defined in this chunk — presumably a Mysql
# instance created earlier in the file; confirm before running standalone.
provinces = mysql.getAll(SQL_queryP)
# Flatten the parsed document to "|"-separated plain text and strip symbols
# before keyword extraction.  `soup` is presumably a BeautifulSoup object at
# this point — TODO confirm against the earlier part of the file.
soup = soup.get_text("|")
soup = StringTools.remove_symbol(soup)
print(soup)
# For each field keyword, extract its value from the flattened text and
# persist it: the first extracted field INSERTs the bidding row, later
# fields UPDATE that same row by primary key.
row_id = 0  # primary key of the inserted bidding row; 0 = no row created yet
for ft in filters:
    mysql = Mysql()
    keyword = filters[ft]
    # Locate the keyword, skipping occurrences followed by "详见" ("see details").
    index_start = StringTools.find_excluding(soup, keyword)
    if index_start > 0:
        index_start = index_start + len(keyword)
        index_start = StringTools.skip(index_start, soup)
        end = StringTools.end(index_start, soup)
        data = soup[index_start:end]
        print(keyword, data)
        # Province handling: map the raw location text onto its standardized
        # name.  No break on match — a later mapping may overwrite an earlier
        # one, preserving the original behavior.
        if ft == "province":
            for province in provinces:
                origin = province['origin'].decode("utf8")
                if origin in data:
                    data = province['standard'].decode("utf8")
        if row_id == 0:
            # Parameterized (%s) to avoid SQL injection from scraped
            # `data`/`site`; the column name comes from our own `filters`
            # keys, so interpolating it is safe.
            SQL_save = "INSERT INTO bidding (`" + ft + "`,`origin_site`) VALUES (%s, %s)"
            print(SQL_save)
            row_id = mysql.insertOne(SQL_save, [data, site])
        else:
            # NOTE(review): assumes Mysql.update forwards params to
            # cursor.execute the same way insertOne does — verify dao.mysql.
            SQL_update = "UPDATE `crawler`.`bidding` SET `" + ft + "` = %s WHERE `id` = %s"
            print(SQL_update)
            mysql.update(SQL_update, [data, row_id])
    mysql.dispose()


