from bs4 import BeautifulSoup

import requests
import csv
import bs4
import time
from datetime import datetime
import mysql.connector

import json
import os
import sys
import io
import xlrd
from xlrd import xldate_as_tuple

from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By

'''
US pork carcass FOB plant price.
USDA pork carcass price page:
https://mpr.datamart.ams.usda.gov/menu.do?path=Products\Pork\Daily%20Pork\(LM_PK602)%20National%20Daily%20Negotiated%20Pork%20Report%20-%20FOB%20Plant%20-%20Afternoon
Takes the first price figure in the table (Carcass).
Unit: US cents per pound.

'''
chrome_path ='/usr/bin/chromedriver'
# chrome_path = 'C:\Program Files\chrome\chromedriver.exe'  # Windows alternative path
chrome_options = Options()
chrome_options.add_argument('--no-sandbox')
chrome_options.add_argument('--headless')
chrome_options.add_argument('--disable-gpu')
# Desktop Chrome user agent so the USDA site serves the normal page.
chrome_options.add_argument('User-Agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/86.0.4240.183 Safari/537.36')


# Database connection configs.
# NOTE(security): credentials are hardcoded in source — consider moving them to
# environment variables or a config file kept out of version control.
config_168 = {'host': '127.0.0.1',  # default is 127.0.0.1
          'user': 'root', 'password': 'Tnq39/*riqJcC', 'port': 3306,  # default is 3306
          'database': 'ccii_redesign', 'charset': 'utf8','auth_plugin':'mysql_native_password'  # default is utf8
          }

config1 = {'host': 'rm-2zefd6473rz062234.mysql.rds.aliyuncs.com',  # default is 127.0.0.1
          'user': 'prod_ccii', 'password': 'cI1546_wodesecerts', 'port': 3306,  # default is 3306
          'database': 'ccii_prod', 'charset': 'utf8', 'auth_plugin':'mysql_native_password'  # default is utf8
          }

# Active config: this is the one the functions below connect with.
config = {'host': 'rm-2zefd6473rz062234.mysql.rds.aliyuncs.com',  # default is 127.0.0.1
          'user': 'prod_python', 'password': '939_58J6kAW)P&^', 'port': 3306,  # default is 3306
          'database': 'ccii_prod', 'charset': 'utf8'  # default is utf8
          }

# Pounds-to-kilograms conversion factor (rounded; exact value is 0.45359237).
# bang = 0.45359237
bang = 0.454


# 查询数据库列表
# Query the DB for names already stored for the given price date.
def find_name_List(price_date):
    """Return all `name` values from alarm_intl_pig_price for *price_date*.

    price_date: date string compared via MySQL date_format "%Y-%m-%d".
    Returns a list of 1-tuples (cursor.fetchall() shape); an empty list when
    the connection or the query fails.
    """
    data = []  # default so a failed query no longer raises NameError at return
    try:
        cnn = mysql.connector.connect(**config)
    except mysql.connector.Error as e:
        print('数据库链接失败！', str(e))
        # Original fell through and crashed on ''.cursor(); bail out instead.
        return data
    print("连接数据库sucessfully!")

    cursor = cnn.cursor(buffered=True)  # buffered so fetchall is safe
    try:
        # Parameterized query — the date value is never interpolated into SQL.
        stmt = 'select name from alarm_intl_pig_price  where date_format(price_date, "%Y-%m-%d") = %s '
        cursor.execute(stmt, [price_date])
        data = cursor.fetchall()
    except mysql.connector.Error as e:
        print('查询数据报错！', str(e))
    finally:
        cursor.close()
        cnn.close()

    return data


# 处理成sql插入数据
# Transform scraped rows into rows ready for SQL insertion.
def handle_sql_data(in_data):
    """Filter out rows already stored and enrich the rest for insertion.

    in_data: list of [name, price_date, price] rows (price in US cents/lb).
    The list is filtered and extended in place and also returned; each
    surviving row becomes
    [name, price_date, price, rate, price_cif, price_fob, country_en,
     country_cn, currency_type, unit], matching save()'s INSERT column order.
    """
    # Guard: the original indexed in_data[0][1] unconditionally and crashed
    # (IndexError) when the scrape produced no rows.
    if not in_data:
        return in_data

    db_names = find_name_List(in_data[0][1])
    db_name_list = [row[0] for row in db_names]
    existing = set(db_name_list)  # O(1) membership instead of list scans
    print(db_name_list)

    # Iterate over a copy so removing from in_data is safe.
    for item in list(in_data):
        if item[0] in existing:
            in_data.remove(item)
            print("{} -->在数据库已存在，移除该条记录不保存。".format(item[0]))
        else:
            print("不存在--" + item[0])

    for data in in_data:
        rate = get_rate(data[1], "USD")
        data.append(rate)  # index 3: USD->CNY exchange rate
        # FOB price converted to CNY per kilogram.
        fob_price = format(float(data[2]) / 100 / bang * float(rate), ".2f")
        # CIF (CNY/kg) = FOB (CNY/kg) + sea freight (2000*7.1/25000 CNY/kg)
        #              + marine insurance (FOB * 0.05%)
        cif_price = format(float(fob_price) + float(2000 * 7.1 / 25000) + float(fob_price) * 0.0005, ".2f")
        data.append(cif_price)  # index 4: CIF price
        data.append(fob_price)  # index 5: converted FOB price
        data.append("USA")      # index 6: country (en)
        data.append("美国")     # index 7: country (cn)
        data.append("USD")      # index 8: currency
        data.append("美分/磅")  # index 9: original price unit
        print(data)

    return in_data




'''
抓取获取页面数据
'''
# Scrape the USDA report pages via headless Chrome.
def chrome_get(c_url):
    """Scrape the afternoon FOB plant pork report from the USDA site.

    c_url: URL of the report menu (form submission) page.
    Returns a list of [cut_name_cn, price_date, price] rows, where price is
    the raw table figure in US cents per pound and price_date is YYYY-mm-dd.
    """
    # Chinese cut names, by column position in the report table.
    cut_names = ["猪胴体", "猪里脊", "猪头肉", "猪前腿", "猪肋排", "猪后腿", "五花肉"]

    rs_data = list()
    browser = webdriver.Chrome(executable_path=chrome_path, options=chrome_options)
    try:
        browser.get(c_url)

        # The site only renders the data after the form is submitted twice.
        # find_element(By.TAG_NAME, ...) replaces the deprecated
        # find_element_by_tag_name (removed in Selenium 4); By is imported
        # at the top of the file.
        browser.find_element(By.TAG_NAME, "form").submit()
        browser.find_element(By.TAG_NAME, "form").submit()
        browser.refresh()
        print(browser.current_url)

        listing = BeautifulSoup(browser.page_source, 'lxml')
        tables = listing.find_all("table", class_="htmlTable")
        atags = tables[0].find_all("a")
        priceDate = atags[0].string
        # Normalize mm/dd/YYYY -> YYYY-mm-dd.
        priceDate = time.strftime('%Y-%m-%d', time.strptime(priceDate, '%m/%d/%Y'))
        print("价格日期=" + priceDate)
        ahref = "https://mpr.datamart.ams.usda.gov/" + atags[0].get("href")
        print("ahref=" + ahref)

        # Follow the link to the actual report page.
        browser.get(ahref)
        browser.refresh()

        # Price unit is US cents per pound (1 lb = 0.45359237 kg).
        pageData = BeautifulSoup(browser.page_source, 'lxml')
        tableList = pageData.find_all("table", class_="htmlTable")
        trs = tableList[1].find_all("tr")
        print(trs)
        divs = trs[2].find_all("div")
        for i, div in enumerate(divs):
            # Unknown extra columns fall back to a placeholder name.
            name = cut_names[i] if i < len(cut_names) else "---"
            rs_data.append([name, str(priceDate), str(div.string)])
    finally:
        # quit() (rather than close()) tears down the whole driver even when
        # scraping fails partway, so chromedriver processes are not leaked.
        browser.quit()

    return rs_data


def main(reqUrl):
    """Scrape prices from *reqUrl*, enrich them, and persist any new rows."""
    print(" 开始爬取数据" + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
    data_item = chrome_get(reqUrl)
    price_data = handle_sql_data(data_item)
    # Save only when something new remains (truthiness instead of len() > 0).
    if price_data:
        save(price_data)
    print("爬取数据完成！" + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))



# 查询数据库得到汇率
# Query the DB for the CNY exchange rate on a given date.
def get_rate(priceDate, currencyType):
    """Return the *currencyType*->CNY rate for *priceDate*.

    priceDate: date string matched against worm_date with LIKE.
    currencyType: source currency code, e.g. "USD" (looked up as "USDCNY").
    Returns 0 when the connection fails, the query fails, or no row matches.
    """
    rate = 0
    try:
        cnn = mysql.connector.connect(**config)
    except mysql.connector.Error as e:
        print('数据库链接失败！', str(e))
        # Original fell through and crashed on ''.cursor(); return default.
        return rate
    print("sucessfully!")

    cursor = cnn.cursor(buffered=True)
    try:
        # Parameterized query: the original built SQL by string concatenation,
        # which is SQL-injection-prone and was missing a space before "and".
        stmt = ('select rate_value from alarm_rate_cny '
                'where worm_date like %s and rate_en_name = %s')
        cursor.execute(stmt, ['%' + priceDate + '%', currencyType + 'CNY'])
        row = cursor.fetchone()
        # Original did fetchone()[0] and raised an uncaught TypeError when no
        # row matched; keep the 0 default instead.
        if row is not None:
            rate = row[0]
        print(rate)
        print("查询数据成功！")
    except mysql.connector.Error as e:
        print('查询数据报错！', str(e))
    finally:
        cursor.close()
        cnn.close()

    return rate

# 保存到数据库
def save(data):
    cnn = ''
    try:
        cnn = mysql.connector.connect(**config)  # connect方法加载config的配置进行数据库的连接，完成后用一个变量进行接收
    except mysql.connector.Error as e:
        print('数据库链接失败！', str(e))
    else:  # try没有异常的时候才会执行
        print("sucessfully!")

    # 插入数据库
    cursor = cnn.cursor(buffered=True)  # 获取插入的标记位
    try:

        # 第三种：可以一次插入多条，效率比一条条插高,用的方法是executemany 猪牛羊 1 2 3
        stmt = 'insert into alarm_intl_pig_price(name, price_date, price, rate, price_cif, price_fob, country_en, country_cn, currency_type, unit, create_date, del_flag) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, NOW(), 0)'
        # for item in data:
        #     print(item)
        # cursor.execute(stmt, item)
        cursor.executemany(stmt, data)
        cnn.commit()
        print("插入数据成功！")

    except mysql.connector.Error as e:
        print('插入数据报错！', str(e))
    finally:  # 无论如何都会执行下面的语句
        cursor.close()  # 关闭标记位
        cnn.close()  # 关闭数据库链接


'''
results_url = "https://mpr.datamart.ams.usda.gov/results.do"
home_url = "https://mpr.datamart.ams.usda.gov/"
'''

# Form submission page for the afternoon FOB plant pork report.
# Raw string: the path contains literal backslashes; as a plain string the
# sequences "\P" and "\(" are invalid escapes that trigger warnings on
# modern Python. The raw literal yields byte-identical contents.
url = r"https://mpr.datamart.ams.usda.gov/menu.do?path=Products\Pork\Daily%20Pork\(LM_PK602)%20National%20Daily%20Negotiated%20Pork%20Report%20-%20FOB%20Plant%20-%20Afternoon"

if __name__ == '__main__':
    # main(home_url)
    main(url)








