import requests
import sqlite3

from bs4 import BeautifulSoup

# Module-level SQLite connection and cursor shared by all scraping functions.
# The connection is never closed explicitly; cleanup relies on process exit.
conn = sqlite3.connect('foofish.db')
c = conn.cursor()

def get_header(user_agent=None, accept=None):
    """Build the HTTP request headers used for every fetch.

    Falsy/omitted arguments fall back to a desktop-Firefox User-Agent
    and a standard HTML-preferring Accept string.
    """
    default_agent = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0'
    default_accept = 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8'
    return {
        'User-Agent': user_agent or default_agent,
        'Accept': accept or default_accept,
    }

def get_article_detail(url):
    """Fetch one article page and persist it into the `article` table.

    Extracts the title (h1.header-title), the date fragment after the first
    comma of p.header-date, and the full <article> text, then inserts one
    row and commits on the module-level connection.

    Raises AttributeError if the page lacks any of the expected elements.
    """
    # timeout so a stalled server cannot hang the crawl forever
    req = requests.get(url, headers=get_header(), timeout=10)
    # Explicit parser: omitting it makes bs4 pick whatever is installed,
    # which varies between machines (and emits GuessedAtParserWarning).
    bs = BeautifulSoup(req.content, 'html.parser')
    title = bs.find('h1', attrs={'class': 'header-title'}).text
    createtime = bs.find('p', attrs={'class': 'header-date'}).text.split(',')[1]
    article_content = bs.find('article').text
    # Identifiers must be double-quoted in SQL; single quotes denote string
    # literals and only worked via an SQLite compatibility quirk.
    c.execute(
        'INSERT INTO article ("Title","CreateTime","Content") VALUES (?,?,?)',
        (title, createtime, article_content))
    conn.commit()

def get_article_url(i):
    """Scrape every article linked from index page *i*.

    Page 1 lives at /index.html; subsequent pages at /index<i>.html.
    Each <dd> element's anchor href is handed to get_article_detail().
    """
    if i == 1:
        url = 'https://foofish.net/index.html'
    else:
        url = 'https://foofish.net/index' + str(i) + '.html'
    # timeout so a stalled server cannot hang the crawl forever
    req = requests.get(url, headers=get_header(), timeout=10)
    # Explicit parser avoids machine-dependent parser selection by bs4.
    bs = BeautifulSoup(req.content, 'html.parser')
    for dd in bs.find_all('dd'):
        link = dd.a
        # A <dd> without an <a href> would have crashed the original
        # (dd.a is None -> TypeError); skip such entries instead.
        if link is not None and link.has_attr('href'):
            get_article_detail(link['href'])

# Driver: crawl the index pages. NOTE(review): range(8, 9) visits only page 8
# — presumably a resume point from an earlier run; widen the range to crawl
# the full archive.
for i in range(8,9):
    get_article_url(i)