from urllib.parse import urljoin

import lxml
import requests
from bs4 import BeautifulSoup


def get_page(url, timeout=10):
    """
    Fetch a page and parse it into a BeautifulSoup tree.

    :param url: page URL to fetch
    :param timeout: request timeout in seconds (default 10) so a dead or
        unreachable server cannot hang the scraper forever
    :return: BeautifulSoup object parsed with the lxml parser
    :raises requests.HTTPError: if the server returns a non-2xx status
    :raises requests.Timeout: if the server does not respond in time
    """
    response = requests.get(url, timeout=timeout)
    # Fail fast on 4xx/5xx instead of silently parsing an error page.
    response.raise_for_status()
    return BeautifulSoup(response.text, 'lxml')

def get_news_list(url):
    """
    Collect the detail-page links from the news list page.

    :param url: list-page URL
    :return: list of absolute detail-page URLs
    """
    soup = get_page(url)
    anchors = soup.select('.banner-news ul li a')
    # a.get('href') is None for anchors without an href attribute; filter
    # those out so callers never pass None to requests.get().
    # urljoin resolves relative hrefs against the list-page URL; absolute
    # hrefs pass through unchanged, so existing behaviour is preserved.
    return [urljoin(url, a.get('href')) for a in anchors if a.get('href')]

def get_news_detail(url):
    """
    Scrape one news detail page.

    :param url: detail-page URL
    :return: dict with keys 'title', 'publish_date', 'view_count' and
        'content'; a value is None when the corresponding element is
        missing from the page (instead of crashing the whole crawl)
    """
    soup = get_page(url)

    def _string(node):
        # Calling .string on a missing (None) node raises AttributeError;
        # degrade to None so one malformed page cannot abort the crawl.
        return node.string if node is not None else None

    meta = soup.select('.am-article-meta span')
    # Original used find('', {...}); an empty tag name matches any tag,
    # which is exactly what attrs-only matching does.
    article = soup.find(attrs={'class': 'am-article'})
    return {
        'title': _string(soup.find('h1', {'class': 'am-article-title'})),
        'publish_date': _string(meta[0]) if len(meta) > 0 else None,
        'view_count': _string(meta[1]) if len(meta) > 1 else None,
        'content': article.text if article is not None else None,
    }

def main():
    """Crawl the news list page, fetch every detail page, and print the results."""
    list_url = "http://localhost/shopxo/"  # list-page URL
    detail_links = get_news_list(list_url)  # one link per article
    # The first link is intentionally skipped (it is not an article page).
    results = [get_news_detail(link) for link in detail_links[1:]]
    print(results)


if __name__ == "__main__":
    main()

# NOTE: an earlier inline prototype of this scraper (duplicating the
# functions above line-for-line) used to live here as commented-out code;
# it has been removed — consult version control history if it is needed.