{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import re\n",
    "import urllib\n",
    "import urllib.request\n",
    "from datetime import datetime, timedelta\n",
    "\n",
    "from bs4 import BeautifulSoup"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
    "# 函数名：parse_station\n",
    "# 参数：station\n",
    "# 函数功能：将前面下载到的html文件打开，并执行分析，从html代码中提取感兴趣的数据并保存成一个csv文件\n",
    "# 原作者：Randy Olson\n",
    "# 程序修改：西南科技大学 通信1701 刘仲夏小组\n",
    "# 修改时间：2020年8月29日\n",
    "\n",
    "def parse_station(dir_name):\n",
    "    # 此处修改获取天气信息的时间段\n",
    "    current_date = datetime(year=2014, month=7, day=1)\n",
    "    end_date = datetime(year=2014, month=7, day=16)\n",
    "\n",
    "    with open('{}.csv'.format(dir_name), 'w', encoding='utf-8') as out_file:\n",
    "        out_file.write('date,Minimum Temperature(F),Mean Temperature(F),Maximum Temperature(F),'\n",
    "                       'Mean Sea Level Pressure,Mean Dew Point,'\n",
    "                       'Total Precipitation(IN),Visibility,'\n",
    "                       'Snow Depth,Mean Wind Speed,'\n",
    "                       'Maximum Sustained Wind Speed,Maximum Wind Gust\\n')\n",
    "\n",
    "    # 测试代码\n",
    "        while current_date != end_date:\n",
    "            try_again = False\n",
    "            with open('{}/{}-{}-{}.html'.format(dir_name,\n",
    "                                            current_date.year,\n",
    "                                            current_date.month,\n",
    "                                            current_date.day)) as in_file:\n",
    "\n",
    "                # 加载beautiful soup，便于分析网页\n",
    "                # 本段的大概逻辑以注释形式给出\n",
    "                soup = BeautifulSoup(in_file.read(), 'html.parser', from_encoding=\"utf-8\")\n",
    "\n",
    "                # 找到所有table标签的项，缩小搜索范围\n",
    "                weather_data_table = soup.find_all('table')\n",
    "                # 寻找table中weatherhistory_results\n",
    "                for item in weather_data_table:\n",
    "                    if item['class'][0] == \"weatherhistory_results\":\n",
    "                        target_table = item\n",
    "                        break\n",
    "                # 创建数据列表，按照“日期、最低温度、平均温度、最大温度、平均海平面、平均露点、总降雨量、可见度、雪深、平均风速、最大持续风速、最大阵风”\n",
    "                weather_data_value = []\n",
    "                weather_data_value.append('{}/{}/{}'.format(current_date.year, current_date.month, current_date.day))\n",
    "                target_tag = target_table.find_all(class_ = re.compile('weatherhistory_results_datavalue *'))\n",
    "                # 确定class = value的内容，即为各项数据的值。其中，空值用None代替\n",
    "                for item in target_tag:\n",
    "                    weather_data_value_temp = item.find('span', class_ = 'value')\n",
    "                    if weather_data_value_temp:\n",
    "                        weather_data_value.append(weather_data_value_temp.string)\n",
    "                    else:\n",
    "                        weather_data_value.append('None')\n",
    "                        \n",
    "                # 尝试将数据写入csv文件，如果出现错误，执行异常处理，即重新下载网页\n",
    "                try:        \n",
    "                    out_file.write(','.join('%s' %item for item in weather_data_value))\n",
    "                    out_file.write('\\n')\n",
    "                except:\n",
    "                    try_again = True\n",
    "\n",
    "                # 如果需要重新下载网页，则从https://www.almanac.com/重新下载\n",
    "                # 如果parser函数卡在某个日期时，需要手动打开下载的html网页找出原因。有数据出现遗失时，parser函数会卡住。\n",
    "                # 此时可以手动将数据填入csv表格，或者直接跳过该日期\n",
    "                if try_again:\n",
    "                    print('Error with date {}'.format(current_date))\n",
    "\n",
    "                    lookup_URL = 'https://www.almanac.com/weather/history/{}/{}/{}-{}-{}'\n",
    "        \n",
    "                # 伪造请求头，其中包括Cookie和UA等欺骗信息\n",
    "                    headers = {\n",
    "                'Sec-Fetch-Dest': \"document\",\n",
    "                'Cookie': \"_vuid=251ab9fb-f390-484f-8919-ab9fa3acaa9e; _vuid=251ab9fb-f390-484f-8919-ab9fa3acaa9e; _vuid=251ab9fb-f390-484f-8919-ab9fa3acaa9e; has_js=1; __gads=ID=970563c97e41b425:T=1598796131:S=ALNI_MaJx39gB7pOCYNSGUiSeG5r_EOnKA; _ga=GA1.2.1898351737.1598796130; _gid=GA1.2.529034958.1598796132; ltkSubscriber-Footer=eyJsdGtDaGFubmVsIjoiZW1haWwiLCJsdGtUcmlnZ2VyIjoibG9hZCJ9; GSIDuTRIeOHB3qZL=ea4e6011-9474-492d-aef0-604aac7f1bb3; STSID307658=ba991bac-57fe-4fb0-ad61-4d6ee3d65679; ltkpopup-suppression-7c7cf77d-cc93-41b5-a050-b01d29782461=1; __qca=P0-1665003281-1598796238044; _vuid=251ab9fb-f390-484f-8919-ab9fa3acaa9e; ltkpopup-suppression-6e3e6c40-4bcb-4b08-9fb6-ae12dd98ff07=1; ltkpopup-session-depth=12-1; mylocation=%5B%7B%22type%22%3A%22other%22%2C%22id%22%3A%22295004%22%2C%22county%22%3A%22Miami-Dade%22%2C%22addcounty%22%3A0%2C%22country%22%3A%22US%22%2C%22label%22%3A%22Miami%2C+FL%22%2C%22uniqid%22%3A%22ZxwqUDik%22%7D%2C%7B%22type%22%3A%22zip%22%2C%22id%22%3A%2299850%22%2C%22county%22%3A%22Juneau%22%2C%22addcounty%22%3A0%2C%22country%22%3A%22US%22%2C%22label%22%3A%22Juneau%2C+AK%22%2C%22uniqid%22%3A%22sK3eVQ0w%22%7D%2C%7B%22type%22%3A%22zip%22%2C%22id%22%3A%2299850%22%2C%22county%22%3A%22Juneau%22%2C%22addcounty%22%3A0%2C%22country%22%3A%22US%22%2C%22label%22%3A%22Juneau%2C+AK%22%2C%22uniqid%22%3A%22gTT5YMeb%22%7D%2C%7B%22used_default%22%3Atrue%2C%22type%22%3A%22other%22%2C%22id%22%3A%22866642%22%2C%22country%22%3A%22US%22%2C%22label%22%3A%22Dublin%2C+NH%22%2C%22uniqid%22%3A%22Qdt9ogC7%22%7D%5D\",\n",
    "                'Host': \"www.almanac.com\",\n",
    "                'Sec-Fetch-Mode': \"navigate\",\n",
    "                'Sec-Fetch-Site': \"same-origin\",\n",
    "                'Sec-Fetch-User': \"?1\",\n",
    "                'Upgrade-Insecure-Requests': \"1\",\n",
    "                'Connection': \"keep-alive\",\n",
    "                'User-Agent': \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.135 Safari/537.36\",\n",
    "                'Cache-Control': \"no-cache\",\n",
    "                'Postman-Token': \"c7d43f08-aa2b-4ae6-a622-ebf6e6953893\"\n",
    "                            }\n",
    "                    formatted_lookup_URL = lookup_URL.format(dir_name.split('-')[0],\n",
    "                                                        dir_name.split('-')[1],\n",
    "                                                        current_date.year,\n",
    "                                                        current_date.month,\n",
    "                                                        current_date.day)\n",
    "            # 构造请求\n",
    "                    req = urllib.request.Request(url = formatted_lookup_URL, headers = headers)\n",
    "            # 加载网页，ignore参数的含义是：如果网页解码出现错误则忽略错误\n",
    "                    html = urllib.request.urlopen(req).read().decode(\"utf-8\",'ignore')\n",
    "            # 将爬取到的网页保存起来，存在前面建好的文件夹中，文件名用：年-月-日.html的格式来存\n",
    "                    out_file_name = '{}/{}-{}-{}.html'.format(dir_name, current_date.year,\n",
    "                                                            current_date.month,\n",
    "                                                            current_date.day)\n",
    "            \n",
    "            # 写入html文件并保存，编码改为utf-8防止程序因为Unicode编码而报的异常\n",
    "                    with open(out_file_name, 'w', encoding = 'utf-8') as out_file:\n",
    "                        out_file.write(html)\n",
    "            # 当前日期+1，进入下一次循环。\n",
    "                current_date += timedelta(days=1)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {
    "tags": []
   },
   "outputs": [],
   "source": [
    "# Select the station to parse, e.g. Miami, Florida.  The argument is both\n",
    "# the directory containing the downloaded '<year>-<month>-<day>.html' pages\n",
    "# and the base name of the output CSV ('FL-Miami.csv'); its '<state>-<city>'\n",
    "# parts are also used to rebuild the almanac.com lookup URL on re-download.\n",
    "parse_station('FL-Miami')"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.4-final"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}