import requests
from bs4 import BeautifulSoup
import csv
from multiprocessing import Queue
import  threading
import random
import time
import xlwt
import pymysql
import bs4
import urllib
import re
import pandas as pd
import json
import codecs

# Pool of desktop-browser User-Agent strings; one is picked at random per run
# so repeated executions do not always present the same client fingerprint.
User_Agent = [
    "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36",
    "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
    "Mozilla/5.0 (Windows; U; Windows NT 6.1; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.6; rv:2.0.1) Gecko/20100101 Firefox/4.0.1",
    "Mozilla/5.0 (Windows NT 6.1; rv:2.0.1) Gecko/20100101 Firefox/4.0.1"]

# Request headers shared by every crawl request below.
HEADERS = {
    # random.choice adapts to the pool size automatically; the original
    # User_Agent[random.randint(0, 4)] hard-coded the list length.
    'User-Agent': random.choice(User_Agent),
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
    'Accept-Language': 'zh-CN,zh;q=0.8,en-US;q=0.5,en;q=0.3',
    'Accept-Encoding': 'gzip, deflate, br',
    'Cookie': '',
    'Connection': 'keep-alive',
    'Pragma': 'no-cache',
    'Cache-Control': 'no-cache'
}
# Text file collecting the URLs whose crawl failed, for a later retry pass.
# NOTE(review): the handle is never closed and uses the platform default
# encoding — confirm this is intentional.
path_list_error = 'scenery_error_inform22.txt'  # txt file holding URLs that failed while scraping the jingdian list
list_error_file = open(path_list_error, 'a+')  # failed URLs are appended to the end of the file

# NOTE(review): this path is identical to path_list_error, so "information"
# output would land in the error log — looks like a copy-paste slip; confirm
# the intended filename.
path_jingdian_information = 'scenery_error_inform22.txt'
jingdian_information_file = open(path_jingdian_information,'a+')

# Input CSV: one scenery-detail URL per row (read from column 0 below).
# NOTE(review): the inner file handle passed to csv.reader is never closed.
csv_file = csv.reader(open('jdlist22.csv','r'))

# One column list per scraped field; index i across every list describes the
# same scene, so they must always be appended to together.
(cat_scene_id, cat_scene_name, cat_scene_rank, cat_scene_infor, cat_suggest,
 cat_scene_score, cat_scene_address, cat_scene_phone, cat_open_time,
 cat_ticket, cat_season, cat_scene_transport, cat_scene_comment_sum,
 cat_scene_5, cat_scene_4, cat_scene_3, cat_scene_2, cat_scene_1,
 cat_scene_good, cat_scene_mid, cat_scene_bad) = ([] for _ in range(21))

# Crawl every scenery-detail URL from the input CSV and collect its fields
# into the module-level cat_* column lists.  A URL that fails — non-200
# response, network error, or a parsing exception — is appended to
# list_error_file for a later retry pass.
for url in csv_file:

    try:
        time.sleep(1)  # throttle: one request per second to avoid being blocked
        response = requests.get(url[0], headers=HEADERS, allow_redirects=False, timeout=5)
        if response.status_code == 200:
            html = response.content
            html = html.decode("utf-8")
            soup = BeautifulSoup(html, "html.parser")
            div_title = soup.find('div', class_='b_title clrfix')

            # Scene id and name from the title block.
            scene_id = ''
            scene_name = ''
            if div_title is not None:
                scene = div_title.find('h1', class_='tit').get_text()
                # Strip the latin-alphabet / digit alias from the title.
                scene_name = re.sub("[A-Za-z0-9\']", "", scene)
                print(scene_name)
                if div_title.find('a').get('data-id') is not None:
                    scene_id = div_title.find('a').get('data-id')
                    print(scene_id)
                else:
                    # Fall back to the first number embedded in the link href.
                    href = div_title.find('a').get('href')
                    scene_id = re.findall("\d+", href)[0]
                    print(scene_id)

            # Scene ranking within its city.
            div_rank = soup.find('div', class_='ranking')
            if div_rank is not None:
                scene_rank = div_rank.get_text().split('\n')[0]
            else:
                scene_rank = " "
            print(scene_rank)

            # Suggested visit duration.
            div_time = soup.find('div', class_='time')
            suggest = div_time.get_text() if div_time is not None else " "
            print(suggest)

            # Average visitor score.
            # BUG FIX: the original assigned the fallback to a stray local
            # `score` instead of `avgscore`, leaving avgscore as ''.
            div_score = soup.find('span', class_='cur_score')
            avgscore = div_score.get_text() if div_score is not None else " "
            print(avgscore)

            # Introduction paragraph.
            div_content = soup.find('div', class_='e_db_content_box')
            if div_content is not None and div_content.find('p') is not None:
                content = div_content.find('p').get_text()
            else:
                content = " "
            print(content)

            # Address and phone from the key/value <dl> table.
            scene_address = ''
            scene_phone = ''
            td = soup.find('td', class_='td_l')
            link = {}
            if td is not None:
                for dl in td.find_all('dl'):
                    # BUG FIX: only record a pair when BOTH <dt> and <dd>
                    # exist.  The original assigned link[key] = contentdd
                    # unconditionally, raising NameError on the first <dl>
                    # missing a <dt>, or silently pairing a stale key with a
                    # stale value.
                    dt = dl.find('dt')
                    dd = dl.find('dd')
                    if dt is not None and dd is not None:
                        key = dt.get_text()
                        print(key)
                        contentdd = dd.get_text()
                        print(contentdd)
                        link[key] = contentdd
                scene_address = link.get('地址:', " ")
                scene_phone = link.get('电话:', " ")
            else:
                scene_address = " "
                scene_phone = " "

            # Opening hours ("　" is the full-width placeholder the original used).
            dl = soup.find('dl', class_='m_desc_right_col')
            if dl is not None and dl.find('dd') is not None:
                open_time = dl.find('dd').get_text()
                print(open_time)
            else:
                open_time = "　"

            # Ticket price.
            div_price = soup.find('div', class_="b_detail_section b_detail_ticket")
            if div_price is not None and div_price.find('p') is not None:
                price = div_price.find('p').get_text()
            else:
                price = " "
            print(price)

            # Best travel season.
            div_season = soup.find('div', class_='b_detail_section b_detail_travelseason')
            season_p = div_season.find('p') if div_season is not None else None
            season = season_p.get_text() if season_p is not None else "　"
            print(season)

            # Transport guide.
            div_trans = soup.find('div', class_='b_detail_section b_detail_traffic')
            if div_trans is not None:
                div = div_trans.find('div', class_='e_db_content_box e_db_content_dont_indent')
                trans = div.get_text() if div is not None else " "
            else:
                trans = " "
            print(trans)

            # Total comment count, e.g. "(123)" -> "123".
            div_comment = soup.find('div', class_='b_detail_section b_detail_comment')
            if div_comment is not None:
                nums = div_comment.find('span').get_text()
                num = re.sub('[\(\)]', '', nums) if nums is not None else " "
            else:
                num = " "
            print("總評論:" + num)

            # Star-rating breakdown.
            # BUG FIX: the original dereferenced div_comment here without a
            # None-check, raising AttributeError (and losing the whole row)
            # whenever the comment section was absent.
            div_star = div_comment.find('div', class_='star-top') if div_comment is not None else None
            # BUG FIX: the original called int(num) unconditionally, which
            # crashed when the comment count was the " " placeholder.
            if div_star is not None and num.strip().isdigit():
                total_comments = int(num)
                number = {}
                for li in div_star.find_all('li'):
                    star_label = li.find('em').get_text()               # e.g. '5分'
                    width = li.find('div', class_='rate').get('style')  # e.g. 'width:94%'
                    # BUG FIX: the original pattern '[a-zA-z\:\%]' used the
                    # A-z range, which also matches [ \ ] ^ _ ` — keep only
                    # digits instead.
                    percent = re.sub('[^0-9]', '', width)
                    # Original heuristic: the bar is drawn 6% short, so add
                    # it back before converting the percentage to a count.
                    share = (int(percent) + 6) * 10 ** (-2)
                    number[star_label] = int(share * total_comments)
                scene_5 = number['5分']
                print(scene_5)
                scene_4 = number['4分']
                print(scene_4)
                scene_3 = number['3分']
                print(scene_3)
                scene_2 = number['2分']
                print(scene_2)
                scene_1 = number['1分']
                print(scene_1)
                scene_good = number['5分'] + number['4分']
                print(scene_good)
                scene_mid = number['3分']
                print(scene_mid)
                scene_bad = number['2分'] + number['1分']
                print(scene_bad)
            else:
                scene_5 = "　"
                scene_4 = "　"
                scene_3 = "　"
                scene_2 = "　"
                scene_1 = "　"
                scene_good = "　"
                scene_mid = "　"
                scene_bad = "　"

            # Append the whole row; the cat_* lists stay index-aligned because
            # every field is appended exactly once per successful page.
            cat_scene_id.append(scene_id)
            cat_scene_name.append(scene_name)
            cat_scene_rank.append(scene_rank)
            cat_scene_infor.append(content)
            cat_suggest.append(suggest)
            cat_scene_score.append(avgscore)
            cat_scene_address.append(scene_address)
            cat_scene_phone.append(scene_phone)
            cat_open_time.append(open_time)
            cat_ticket.append(price)
            cat_season.append(season)
            cat_scene_transport.append(trans)
            cat_scene_comment_sum.append(num)
            cat_scene_5.append(scene_5)
            cat_scene_4.append(scene_4)
            cat_scene_3.append(scene_3)
            cat_scene_2.append(scene_2)
            cat_scene_1.append(scene_1)
            cat_scene_good.append(scene_good)
            cat_scene_mid.append(scene_mid)
            cat_scene_bad.append(scene_bad)

        else:
            # Non-200 (including redirects, which are not followed): log for retry.
            list_error_file.write(url[0] + '\n')
    except Exception as e:
        # Network/parsing failure: log the URL for retry and keep crawling.
        list_error_file.write(url[0] + '\n')

# Sanity check: every column list must have the same length — one entry per
# successfully scraped scene.  (The original printed each length with 21
# separate statements and also contained four bare tuple expressions whose
# results were discarded — dead code, removed.)
for column in (cat_scene_id, cat_scene_name, cat_scene_rank, cat_scene_infor,
               cat_suggest, cat_scene_score, cat_scene_address, cat_scene_phone,
               cat_open_time, cat_ticket, cat_season, cat_scene_transport,
               cat_scene_comment_sum, cat_scene_5, cat_scene_4, cat_scene_3,
               cat_scene_2, cat_scene_1, cat_scene_good, cat_scene_mid,
               cat_scene_bad):
    print(len(column))

# Assemble one row per scene and export as UTF-8-with-BOM CSV (utf_8_sig keeps
# Excel happy with the Chinese text).
# NOTE(review): 'scene_cityranl' looks like a typo for 'scene_cityrank', but it
# is kept as-is since downstream consumers may depend on the column name.
city = pd.DataFrame({'scene_id':cat_scene_id,'scene_name':cat_scene_name,'scene_cityranl':cat_scene_rank,'scene_infor':cat_scene_infor,
                     'scene_suggest':cat_suggest,'scene_score':cat_scene_score,'scene_address':cat_scene_address,'scene_phone':cat_scene_phone,
                     'scene_time':cat_open_time,'scene_ticket':cat_ticket,'scene_season':cat_season,'scene_transport':cat_scene_transport,'scene_comment':cat_scene_comment_sum,'scene_5':cat_scene_5,'scene_4':cat_scene_4,'scene_3':cat_scene_3,
                     'scene_2':cat_scene_2,'scene_1':cat_scene_1,'scene_good':cat_scene_good,'scene_mid':cat_scene_mid,'scene_bad':cat_scene_bad})

city.to_csv('ajdlisttri22.csv', encoding='utf_8_sig')






