# -*- coding=utf-8 -*-
import datetime
import json
import time

import requests
from bs4 import BeautifulSoup

import pymysql

pymysql.install_as_MySQLdb()  # let the `import MySQLdb` below resolve to pymysql

import MySQLdb

# MySQL connection settings for the Aliyun RDS instance used by the scraper.
# NOTE(review): credentials are hard-coded in source — consider moving them to
# environment variables or a config file kept out of version control.
dbhost = 'rm-2evgq2n507895djo1o.mysql.rds.aliyuncs.com'
dbname = 'db_twitter'
dbuser = 'lorolana'
dbpassword = 'lorolana_1'

class TwitterPerson(object):
    """Scraped profile data for one Twitter account.

    All scalar fields default to the empty string and hold text exactly as
    read from the profile page; ``tweets`` collects the account's tweets.
    """

    def __init__(self):
        for field in ('name', 'location', 'desc', 'joinDate',
                      'totalfollowers', 'totalfollowing',
                      'totaltwitter', 'firsttwitterdate'):
            setattr(self, field, '')
        self.tweets = []

class TweetInfo(object):
    """One scraped tweet: author, timestamps, text, engagement and entities.

    Every field defaults to the empty string and is filled in by the
    scraper; plural ``*ss`` fields hold comma-joined lists (urls, pics,
    hashtags, cashtags, at-replies).
    """

    _FIELDS = (
        'name', 'writtentime', 'text', 'urlss', 'picss', 'tweetiostime',
        'receiver', 'is_reply', 'replys', 'retweets', 'likers',
        'hashtagss', 'cashtagss', 'atreplyss', 'atreplycount',
    )

    def __init__(self):
        for field in self._FIELDS:
            setattr(self, field, '')

def _fetch_with_retries(fetch, attempts=4):
    """Call *fetch* until it succeeds, up to *attempts* times.

    Returns the fetch result, or None when every attempt raised.  Replaces
    the original hand-rolled pyramids of four nested try/except blocks.
    """
    for _ in range(attempts):
        try:
            return fetch()
        except Exception:
            continue
    return None


def _connect_db(attempts=2):
    """Open a MySQL connection to the configured RDS instance.

    Retries once, then re-raises the last error instead of silently
    swallowing it (the original `except: pass` left `conn` unusable and
    crashed later on `conn.cursor()`).
    """
    last_err = None
    for _ in range(attempts):
        try:
            return MySQLdb.connect(
                host=dbhost,
                port=3306,
                user=dbuser,
                passwd=dbpassword,
                db=dbname,
                charset='utf8',
            )
        except Exception as err:
            last_err = err
    raise last_err


def _action_count(tl, action_class, default=''):
    """Return the displayed counter for one tweet action-bar entry.

    *action_class* is js-actionReply / js-actionRetweet / js-actionFavorite.
    An empty counter renders as 0 (matching the original); a missing action
    element returns *default* unchanged.
    """
    action = tl.find(class_=action_class)
    if not action:
        return default
    for span in action.findAll('span'):
        acvalue = span.find(class_='ProfileTweet-actionCountForPresentation')
        if acvalue:
            return acvalue.text if acvalue.text else 0
    return default


def _parse_tweet(tl, name):
    """Build a TweetInfo from one <li class="js-stream-item"> element.

    Consolidates the two near-duplicate parsing loops of the original and
    makes them consistent: counts and pics are now parsed even for tweets
    with no text body (the second loop used to skip them).
    """
    info = TweetInfo()
    info.name = name
    # Prefer the relative timestamp; fall back to the short one.
    ts = (tl.find(class_='js-relative-timestamp')
          or tl.find(class_='js-short-timestamp'))
    if ts:
        info.writtentime = ts['data-time']
    urls, atreplys, hashtags, cashtags = [], [], [], []
    ptext = tl.find(class_='js-tweet-text')
    if ptext:
        info.text = ptext.text.strip()
        if ptext.findAll('a'):
            urls = [a['href']
                    for a in ptext.findAll(class_='twitter-timeline-link')]
            for atreply in ptext.findAll(class_='twitter-atreply'):
                b = atreply.find('b')
                if b:
                    atreplys.append(b.text)
            hashtags = [ht.find('b').text
                        for ht in ptext.findAll(class_='twitter-hashtag')]
            cashtags = [ct.find('b').text
                        for ct in ptext.findAll(class_='twitter-cashtag')]
    info.urlss = ','.join(urls)
    info.hashtagss = ','.join(hashtags)
    info.cashtagss = ','.join(cashtags)
    info.atreplyss = ','.join(atreplys)
    info.atreplycount = len(atreplys)
    info.replys = _action_count(tl, 'js-actionReply', info.replys)
    info.retweets = _action_count(tl, 'js-actionRetweet', info.retweets)
    info.likers = _action_count(tl, 'js-actionFavorite', info.likers)
    info.picss = ','.join(pic['data-image-url']
                          for pic in tl.findAll(class_='js-adaptive-photo'))
    return info


if __name__ == '__main__':
    person_sql = "insert into tb_twitter_person (name, location, `desc`, joinDate, totalfollowers, totalfollowing, totaltwitter, firsttwitterdate, tickersymbol, gvkey, permno) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    tweet_sql = "insert into tb_twitter_tweet (user_name, writtentime, text, replys, retweets, likers, urls, pics, hashtags, cashtags, atreplys, atreplycount, is_mention) values (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
    # JSON endpoint used to page through older tweets; {0}=screen name,
    # {1}=max_position cursor.
    page_search_url = "https://twitter.com/i/profiles/show/{0}/timeline/tweets?include_available_features=1&include_entities=1&max_position={1}&reset_error_state=false"

    # Read the whole company list up front so the file handle is closed
    # (the original leaked it with a bare open(...).readlines()).
    with open('./company.csv', 'r') as fh:
        company_rows = [line.strip().split(',') for line in fh]

    for row in company_rows:
        permno = row[0]
        gvkey = row[1]
        tickersymbol = row[2]
        name = row[3]
        sector = row[4]
        url = row[5]
        company_name = url.split('/')[-1]

        # Fetch the profile page with retries.  The original's fallback hit
        # the JSON timeline endpoint with a then-undefined
        # `next_max_position` (and `json` not yet imported) — a NameError.
        html = _fetch_with_retries(lambda: requests.get(url).text)
        if html is None:
            print("error")
            continue

        soup = BeautifulSoup(html, 'html.parser', from_encoding='utf-8')
        if not soup.find(class_="ProfileNav-item--tweets"):
            # No tweet counter → not a normal profile page; skip it.
            print(name)
            continue

        # Profile header fields, taken verbatim from the page markup.
        totaltwitter = soup.find(class_="ProfileNav-item--tweets").findAll('span')[2].text.strip()
        totalfollowing = soup.find(class_="ProfileNav-item--following").findAll('span')[2].text.strip()
        totalfollowers = soup.find(class_="ProfileNav-item--followers").findAll('span')[2].text.strip()
        desc = soup.find(class_="ProfileHeaderCard-bio").text.strip()
        location = soup.find(class_="ProfileHeaderCard-location").findAll('span')[1].text.strip()
        joinDate = soup.find(class_="ProfileHeaderCard-joinDate").findAll('span')[1]['title']
        timeline = soup.find(id="stream-items-id").findAll("li", class_="js-stream-item")

        # Timestamp of the newest visible tweet (relative stamp preferred).
        wtdoc = timeline[0].find(class_='js-relative-timestamp')
        if wtdoc:
            firsttwitterdate = wtdoc['data-time']
        else:
            firsttwitterdate = timeline[0].find(class_='js-short-timestamp')['data-time']

        # Persist the profile row; try/finally guarantees the connection is
        # closed even if execute/commit raises.
        conn = _connect_db()
        try:
            cur = conn.cursor()
            cur.execute(person_sql, [
                str(name), str(location), str(desc), str(joinDate),
                str(totalfollowers), str(totalfollowing), str(totaltwitter),
                str(firsttwitterdate), str(tickersymbol), str(gvkey),
                str(permno),
            ])
            conn.commit()
        finally:
            conn.close()

        # Parse the tweets on the profile page itself.
        tweetInfos = []
        next_max_position = ''
        idx = 0
        for tl in timeline:
            idx += 1
            # Every 20th item id becomes the paging cursor for the JSON
            # timeline endpoint below (matches the original cadence).
            if idx % 20 == 0:
                next_max_position = tl['data-item-id']
            tweetInfos.append(_parse_tweet(tl, name))

        # Page through older tweets until the endpoint returns no items.
        while True:
            print(len(tweetInfos))
            payload = _fetch_with_retries(
                lambda: json.loads(
                    requests.get(page_search_url.format(company_name, next_max_position)).text))
            if payload is None:
                # Give up on this account: the original `continue` here
                # could spin forever on a persistent failure.
                break
            soup = BeautifulSoup(payload['items_html'], 'html.parser', from_encoding='utf-8')
            timeline = soup.findAll("li", class_="js-stream-item")
            if not timeline:
                break
            for tl in timeline:
                idx += 1
                if idx % 20 == 0:
                    next_max_position = tl['data-item-id']
                tweetInfos.append(_parse_tweet(tl, name))

        # Persist all tweets for this company in batches of 1000 rows.
        conn = _connect_db()
        try:
            cur = conn.cursor()
            batch = []
            for tf in tweetInfos:
                batch.append([
                    str(tf.name), str(tf.writtentime), str(tf.text),
                    str(tf.replys), str(tf.retweets), str(tf.likers),
                    str(tf.urlss), str(tf.picss), str(tf.hashtagss),
                    str(tf.cashtagss), str(tf.atreplyss),
                    str(tf.atreplycount),
                    '',  # is_mention: never populated by the scraper
                ])
                if len(batch) == 1000:
                    cur.executemany(tweet_sql, batch)
                    conn.commit()
                    batch = []
            if batch:
                cur.executemany(tweet_sql, batch)
                conn.commit()
        finally:
            conn.close()
