#!/usr/bin/env python
# coding=utf-8
'''
Created on 2010-08-03

@author: Jason.q.yan
'''
import os, sys, re, traceback, logging
import urllib
import time
import datetime
import getopt
import math
import urllib2
from Route import *

# Matches one <tr>...</tr> fragment describing a single route on a listing page.
routeDiv_re = re.compile(r'''
       <tr>\s+?<td\swidth="2%"\sheight="27">([\s\S]+?)</tr>
               ''', re.X|re.S)
# Captures the text between the "service standard" and "remarks" section
# headings on a detail page (section titles are in Chinese in the HTML).
detail_re =  re.compile(r'''服务标准([\s\S]+?)备注信息''', re.X|re.S)
# Captures the itinerary section, between "itinerary" and "service standard".
schedule_re = re.compile(r'''行程介绍([\s\S]+?)服务标准''', re.X|re.S)
# (vacation-type label, listing-page URL) pairs crawled by do_crawl().
# Labels: domestic in-province / out-of-province / outbound / independent travel.
server_urls = [
               ('省内游','http://www.516cct.com/line/class.asp?id=21'),
               ('省外游','http://www.516cct.com/line/class.asp?id=23'),
               ('出境游','http://www.516cct.com/line/class.asp?id=24'),
               ('自由行','http://www.516cct.com/line/class.asp?id=26')
              ]
def do_crawl():
    try:
        for vacation_type, server_url  in server_urls:
            html = urllib2.urlopen(server_url).read().decode('gb2312','ignore').encode('utf-8')
            #获取所有描述线路的HTML片段 
            route_list = routeDiv_re.findall(html)
            #遍历，解析
            for div in route_list:
                route = Route()
                route.site = 51
                route.supplier_url = 'http://www.516cct.com/'
                route.supplier = "四川康辉国际旅行社"
                route.outcity = '成都'
                route.telphone="028-86089855"
                route.go_t = route.back_t = "-"
                route.type = vacation_type
                route.title = re.compile(r'''class="14_666666">([\s\S]+?)</a>''', re.X|re.S).search(div).group(1)
                if re.compile(r'''￥(\d+)</td>''', re.X|re.S).search(div) :
                    route.price = re.compile(r'''￥(\d+)</td>''', re.X|re.S).search(div).group(1)
                route.rout_url = route.supplier_url+"line/"+re.compile(r'''<a\shref="([\s\S]+?)"\starget="_blank"\sclass="14_666666">''', re.X|re.S).search(div).group(1)
                temp_date = re.compile(r'''<td\swidth="19%"\sclass="font6">([\s\S]+?)</td>''', re.X|re.S).search(div).group(1)
                if str(temp_date).count("月")==0 & str(temp_date).count("日")==0:
                    route.dates = re.compile(r'''<td\swidth="19%"\sclass="font6">([\s\S]+?)</td>''', re.X|re.S).search(div).group(1)
                #根据URL获取线路详细页面的HTML
                temp = urllib2.urlopen(route.rout_url).read().decode('gb2312','ignore').encode('utf-8')
                #进一步解析 
                do_parse(temp,route)
                print >>sys.stderr, 'updating', route.title,route.dates
                route.updRoute()
                #route.updRoute_Schedules()
                
    except:
        print traceback.format_exc(sys.exc_info())

#解析每一天的行程安排
def do_parse(html, route):
    """Parse a route detail page, filling days / img_url / detail on route.

    html  -- UTF-8 encoded detail-page HTML (already transcoded by caller).
    route -- Route instance to mutate in place; nothing is returned.
    """
    days_re = re.compile(r'''旅游天数：(\d+)\s?天</td>''', re.X|re.S)
    img_re = re.compile(r'''<a\shref="([\s\S]+?)"\starget="_blank"><img\ssrc="([\s\S]+?)"\swidth="271"\sheight="159"\sborder="0">''', re.X|re.S)
    show_re = re.compile(r'''class="show">([\s\S]+?)</td>''', re.X|re.S)
    try:
        route.days = days_re.search(html).group(1)
        temp_img_url = img_re.search(html)
        if temp_img_url:
            # "no.gif" is the site's placeholder image — skip it.
            if str(temp_img_url.group(2)).count("no.gif") == 0:
                route.img_url = route.supplier_url + "line/" + temp_img_url.group(2)
    # BUGFIX: the original line was 'except: AttributeError' — a bare except
    # followed by a no-op expression, which swallowed EVERY exception. Catch
    # only the intended AttributeError (raised when a search() finds no match).
    except AttributeError:
        pass
    detail_list = detail_re.findall(html)
    for detailhtml in detail_list:
        # As in the original: if several fragments match, the last one wins.
        route.detail = show_re.search(detailhtml).group(1)
    return

if __name__ == '__main__':
    # Run from the script's own directory so relative paths resolve.
    script_path = os.path.dirname(os.path.realpath(__file__))
    os.chdir(script_path)
    do_debug = False

    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], 'hdno:p:', ['help', 'debug', 'no-cache', 'only-cache', 'provider'])
    except getopt.GetoptError:
        # NOTE(review): usage() is not defined in this file — this path (and
        # -h below) would raise NameError unless usage is imported via the
        # star-import from Route; verify.
        usage()
        sys.exit()

    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage()
            sys.exit()
        elif opt in ('-d', '--debug'):
            do_debug = True
        elif opt in ('-n', '--no-cache'):
            # NOTE(review): use_cache is assigned here but never read in this
            # file — presumably consumed elsewhere, or dead; confirm.
            use_cache = False
        elif opt in ('-o', '--only-cache'):
            # Any value other than no/n/0 enables only_cache.
            if arg.lower() in ('no', 'n', '0'):
                only_cache = False
            else:
                only_cache = True
        elif opt in ('-p', '--provider'):
            # Provider selection accepted but currently ignored.
            pass

    if do_debug:
        # Drop into the debugger before crawling when -d/--debug is given.
        import pdb
        pdb.set_trace()
    do_crawl()
