# coding: utf-8
import ast
import csv
import re
import sqlite3
import sys
import threading
import time
import tkinter as tk

import requests
# Python 2-only bootstrapping removed: on Python 3, `reload` is not a builtin
# and `sys.setdefaultencoding` no longer exists (str is already Unicode), so
# both lines raised at import time and served no purpose.

class TbScrawl:
    """Small Tkinter GUI that scrapes Taobao search-result pages.

    Workflow: `uiTk` builds the window; the "开始爬取" button starts `target`
    on a worker thread, which fetches pages via `get_html` and accumulates
    product dicts in `self.products` through `parse_data`; the "保存csv"
    button dumps the accumulated rows to '<keyword>.csv'.
    """

    def __init__(self):
        # Product dicts accumulated by the most recent crawl.
        self.products = []

    def textPrint(self, message):
        """Append one message line to the GUI result area.

        NOTE(review): this is called from worker threads; Tk widgets are not
        guaranteed thread-safe — consider marshalling via `root.after()`.
        """
        self.result_text.insert(tk.END, message + '\n')

    def target(self):
        """Worker: crawl the requested number of result pages for the keyword."""
        keyword = self.entry_keyword.get()
        depth = self.entry_depth.get()
        link = "https://s.taobao.com/search?q=" + keyword + "&sort=sale-desc"

        for i in range(int(depth.strip())):
            print("正在读取第 " + str(i + 1) + " 页。")
            self.textPrint("正在读取第 " + str(i + 1) + " 页。")
            try:
                # Taobao paginates 44 items per page via the `s` offset.
                url = link + "&s=" + str(44 * i)
                html = self.get_html(url)
                self.parse_data(html)
            except Exception as e:
                info = "spider_crawl出现异常：\n" + str(e)
                print(info)
                continue
        print("爬取完毕，共获取到 " + str(len(self.products)) + " 条商品信息。")
        self.textPrint("爬取完毕，共获取到 " + str(len(self.products)) + " 条商品信息。")
        print("提示：此次爬取信息过程完整结束。")
        self.textPrint("提示：此次爬取信息过程完整结束。")

    def spider_crawl(self):
        """Button handler: clear previous output/results and crawl on a thread."""
        # '1.0' is the canonical Text start index (the float 0.0 only worked
        # by accidental coercion).
        self.result_text.delete('1.0', 'end')
        self.products = []
        t = threading.Thread(target=self.target)
        t.daemon = True  # don't keep the process alive after the window closes
        t.start()

    def get_html(self, url):
        """Fetch `url` and return the decoded HTML.

        :param url: absolute URL to fetch
        :return: response body as text on success; None on any failure
        """
        try:
            user_agent = ('Mozilla/5.0 (Windows NT 6.2; WOW64) '
                          'AppleWebKit/537.36 (KHTML, like Gecko) '
                          'Chrome/45.0.2454.101 Safari/537.36')
            # BUG FIX: the header name is 'User-Agent'; the original
            # 'User_agent' (underscore) was sent as an unknown header and
            # ignored by the server.
            headers = {'User-Agent': user_agent}
            r = requests.get(url, headers=headers, timeout=10)
            r.raise_for_status()
            r.encoding = "utf-8"
            return r.text
        except Exception as e:
            print("获取html出现异常：\n" + str(e))
            self.textPrint("获取html出现异常：" + str(e))
            return None

    @staticmethod
    def _extract(fragment, unescape=False):
        """Return the value of a `"key":"value"` fragment as a plain string.

        Splits on the FIRST colon only (URL values may themselves contain
        ':') and uses ast.literal_eval instead of eval so scraped, untrusted
        page content can never execute code.

        :param fragment: regex match of the form '"key":"value"'
        :param unescape: also decode backslash escapes (for URL fields)
        """
        value = fragment.split(":", 1)[1]
        if unescape:
            value = value.encode("utf-8").decode("unicode_escape")
        result = ast.literal_eval(value)
        if unescape:
            # JSON-style escaped slashes (\/) survive unicode_escape; the
            # original eval() left them in the URLs — normalize them here.
            result = result.replace("\\/", "/")
        return result

    def parse_data(self, html):
        """Pull product fields out of the search-result HTML into self.products.

        The listing data is embedded as JSON-ish `"key":"value"` pairs, so
        each field is located with a regex and unquoted individually.
        """
        try:
            titles = re.findall(r'"raw_title":".*?"', html)
            prices = re.findall(r'"view_price":"[\d.]*"', html)
            pics = re.findall(r'"pic_url":".*?"', html)
            detailurls = re.findall(r'"detail_url":".*?"', html)
            locations = re.findall(r'"item_loc":"\D*?"', html)
            shops = re.findall(r'"nick":".*?"', html)
            shopurls = re.findall(r'"shopLink":".*?"', html)
            for i in range(len(titles)):
                product = {
                    'title': self._extract(titles[i]),
                    'price': self._extract(prices[i]),
                    'pic': "https:" + self._extract(pics[i], unescape=True),
                    'detailurl':
                        "https:" + self._extract(detailurls[i], unescape=True),
                    'location': self._extract(locations[i]),
                    'shop': self._extract(shops[i]),
                    'shopurl':
                        "https:" + self._extract(shopurls[i], unescape=True),
                }
                self.products.append(product)
        except Exception as e:
            print("提取信息出现异常：" + str(e))
            self.textPrint("提取信息出现异常：" + str(e))

    def target_csv(self):
        """Worker: write self.products to '<keyword>.csv', then reset the list."""
        print("开始写csv文件。")
        self.textPrint("开始写csv文件。")
        keyword = self.entry_keyword.get()
        header = [
            'title', 'price', 'shop', 'location', 'pic',
            'detailurl', 'shopurl'
        ]
        # newline='' is required by the csv module on Python 3 (the original
        # 'wb' mode crashed because csv writes str, not bytes); utf-8-sig
        # keeps the Chinese text readable in Excel. `with` closes the file
        # even if a row fails to serialize.
        with open(keyword + '.csv', 'w', newline='', encoding='utf-8-sig') as f:
            writer = csv.writer(f)
            writer.writerow(header)
            for product in self.products:
                writer.writerow([product[h] for h in header])
        self.products = []
        print("成功保存数据到csv文件。")
        self.textPrint("成功保存数据到csv文件。")

    def write_to_csv(self):
        """Button handler: write the CSV on a background thread."""
        t = threading.Thread(target=self.target_csv)
        t.daemon = True  # see spider_crawl
        t.start()

    def exit_app(self):
        """Button handler: stop the Tk main loop."""
        self.root.quit()

    def uiTk(self):
        """Build the main window, wire up the buttons, and enter the event loop."""
        self.root = tk.Tk()
        font_normal_size = 14
        self.root.title('TbCrawl')
        self.root.geometry('820x700')
        self.root.resizable(0, 0)

        label_title = tk.Label(self.root, text='TbCrawl', font=('', 26))
        label_title.pack(anchor='center')

        label_author = tk.Label(self.root, text='pzh', font=('', 14), fg='blue')
        label_author.pack(anchor='center')

        label_keyword = tk.Label(self.root, text='商品关键词：', font=('', font_normal_size))
        label_keyword.place(x=100, y=100)
        self.entry_keyword = tk.Entry(self.root, show=None, font=('', font_normal_size))
        self.entry_keyword.insert('end', '榴莲')
        self.entry_keyword.place(x=250, y=100)

        label_depth = tk.Label(self.root, text='深度≤100：', font=('', font_normal_size))
        label_depth.place(x=100, y=150)
        self.entry_depth = tk.Entry(self.root, show=None, font=('', font_normal_size))
        self.entry_depth.insert('end', '10')
        self.entry_depth.place(x=250, y=150)

        self.result_text = tk.Text(self.root, font=('', font_normal_size), width=60, height=20)
        self.result_text.place(x=100, y=300)

        button_run = tk.Button(
            self.root, text='开始爬取', font=('', font_normal_size), command=self.spider_crawl)
        button_run.place(x=400, y=250)

        button_save_csv = tk.Button(
            self.root, text='保存csv', font=('', font_normal_size), command=self.write_to_csv)
        button_save_csv.place(x=520, y=250)

        button_exit = tk.Button(
            self.root, text='退出程序', font=('', font_normal_size), command=self.exit_app)
        button_exit.place(x=640, y=250)

        self.root.mainloop()
            
        
            
if __name__ == '__main__':
    # Launch the GUI when run as a script.
    application = TbScrawl()
    application.uiTk()
    
    
    
    
    
    
    
    
    
    
    
    
    