#!/usr/bin/env python
# coding=utf-8
'''
Created on 2010-08-02

@author: Jason.q.yan
'''
import os, sys, re, traceback, logging
import urllib
import time
import datetime
import getopt
import math
import urllib2
from Route import *

# Each <li> element on a listing page wraps one route fragment.
routeDiv_re = re.compile(r'''
       <li>([\s\S]+?)</li>
               ''', re.X|re.S)
# One "bar3" div per day of the itinerary on the detail endpoint.
schedule_re =  re.compile(r'''
        <div\sclass="bar3"\sstyle="margin-top:7px;">([\s\S]+?)</div>
        ''', re.X|re.S)
                        
# (vacation type label, listing URL) pairs to crawl:
# '自由行' = independent travel, '团队游' = group tours.
server_urls = [
               ('自由行','http://www.cct.cn/Data/LineList.asp?param1=zyx'),
               ('团队游','http://www.cct.cn/Data/LineList.asp?param1=tdy')
              ]
def do_crawl():
    try:
        for vacation_type, server_url  in server_urls:
            html = urllib2.urlopen(server_url).read().decode('gbk','ignore').encode('utf-8')
            #获取所有描述线路的HTML片段 
            route_list = routeDiv_re.findall(html)
            #遍历，解析
            for div in route_list:
                route = Route()
                route.site = 38
                route.supplier_url = 'http://www.cct.cn/'
                route.supplier = "中国康辉旅行社"
                route.telphone="40061 40031"
                route.outcity = '北京'
                route.go_t = route.back_t = "-"
                route.type = vacation_type
                route.title = re.compile(r'''<a\shref="([\s\S]+?)">([\s\S]+?)</a>''', re.X|re.S).search(div).group(2)
                if re.compile(r'''([\s\d]+?)(元|起)''', re.X|re.S).search(div) :
                    route.price = re.compile(r'''([\s\d]+?)(元|起)''', re.X|re.S).search(div).group(1)
                route.rout_url = route.supplier_url+re.compile(r'''<a\shref="([\s\S]+?)"''', re.X|re.S).search(div).group(1)
                #根据URL获取线路详细页面的HTML
                temp = urllib2.urlopen(route.rout_url).read().decode('gbk','ignore').encode('utf-8')
                #进一步解析 
                do_parse(temp,route)
                print >>sys.stderr, 'updating', route.title
                route.updRoute()
                route.updRoute_Schedules()
                
    except:
        print traceback.format_exc(sys.exc_info())

def do_parse(html, route):
    '''Parse a route detail page into the Route object.

    Fills ``route.detail`` (summary text, when present), ``route.days``
    (count of "第N天" day markers on the LineDetail endpoint) and appends one
    ``Schedule`` per day fragment to ``route.schedules``.

    ``html``  -- UTF-8 bytes of the route's main detail page.
    ``route`` -- Route whose ``rout_url`` points at that page; mutated in place.
    '''
    # "第N天" ("day N") marker — compiled once instead of three times.
    day_re = re.compile(r'''第(\d+)天''', re.X | re.S)
    try:
        detail = re.compile(
            r'''<div\sclass="sortText"><strong>([\s\S]+?)</strong>''',
            re.X | re.S).search(html).group(1)
        if detail:
            route.detail = detail
    except AttributeError:
        # No summary block on this page (search returned None).  The original
        # ``except: AttributeError`` was a bare except followed by a no-op
        # expression, silently swallowing every exception type.
        pass
    # The day-by-day itinerary lives on a separate LineDetail endpoint.
    line_detail_url = route.rout_url.replace("lineinfo.asp", "Data/LineDetail.asp") + "&dpt_eid=KCE&rec_type=xc"
    parthtml = urllib2.urlopen(line_detail_url).read().decode('gbk', 'ignore').encode('utf-8')
    schedule_list = schedule_re.findall(parthtml)
    # Trip length = number of day markers on the detail page.
    days_list = day_re.findall(parthtml)
    if days_list:
        route.days = str(len(days_list))
    # The brief is searched against the whole detail page, which does not
    # change inside the loop — hoist the (loop-invariant) search.
    brief_temp = re.compile(
        r'''<div\sclass="sortText">([\s\S]+?)</div>''',
        re.X | re.S).search(parthtml)
    for fragment in schedule_list:
        schedule = Schedule()
        schedule.s_num = day_re.search(fragment).group(1)
        if brief_temp:
            schedule.s_brief = fragment
            # Only append the page-level brief when it is a clean single div.
            if str(brief_temp.group(1)).count("</div>") == 0:
                schedule.s_brief = schedule.s_brief + brief_temp.group(1)
        route.schedules.append(schedule)
    return

if __name__ == '__main__':
    # Run relative to the script's own directory so cache/data paths resolve.
    script_path = os.path.dirname(os.path.realpath(__file__))
    os.chdir(script_path)
    do_debug = False

    try:
        # Short options 'o:' and 'p:' take an argument, so their long forms
        # need the '=' suffix too; without it ``--only-cache=no`` raised
        # GetoptError instead of being parsed.
        opts, args = getopt.gnu_getopt(
            sys.argv[1:], 'hdno:p:',
            ['help', 'debug', 'no-cache', 'only-cache=', 'provider='])
    except getopt.GetoptError:
        usage()
        sys.exit()

    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage()
            sys.exit()
        elif opt in ('-d', '--debug'):
            do_debug = True
        elif opt in ('-n', '--no-cache'):
            use_cache = False
        elif opt in ('-o', '--only-cache'):
            # Anything other than an explicit "no" enables only-cache mode.
            if arg.lower() in ('no', 'n', '0'):
                only_cache = False
            else:
                only_cache = True
        elif opt in ('-p', '--provider'):
            # NOTE(review): provider selection is accepted but not implemented.
            pass

    if do_debug:
        # Drop into the debugger before crawling when -d/--debug is given.
        import pdb
        pdb.set_trace()
    do_crawl()
