#!/usr/bin/env python
# coding=utf-8

import os, sys, re, traceback, logging
import urllib
import time
import datetime
import getopt
import urllib2
from Route import Route


# Pull each <Line>...</Line> record out of the linelist.xml feed.
vacation_info_re_pre = re.compile(r'''
        <Line>(.*?)<\s*/\s*Line>
        ''', re.X|re.S)
# Generic <tag>text</tag> matcher; the backreference \1 pins the closing tag
# to the opening one, so findall yields (tag, text) pairs.
xml_node_re = re.compile(r'''
        <\s*(\w+)\s*>\s*(.*?)\s*<\s*/\s*\1\s*>
        ''', re.X|re.S)
# Detail-page scrapers.  All patterns are raw strings so regex escapes such
# as \s and \d are passed to the regex engine untouched instead of relying
# on Python leaving unrecognized string escapes alone.
redetail = re.compile(r'<div\sclass="se_Cha_con">\s+?<div[\s\S]+?>([\s\S]+?)</div>', re.IGNORECASE)
relist = re.compile(r'<div\sclass="se_Arr_traveTitle Traveling_t\d+">[\s\S]+?</div>[\s]+</div>', re.IGNORECASE)
retitle = re.compile(r'<div\sclass="se_Arr_traveTitle Traveling_t\d+">([\s\S]+?)</div>', re.IGNORECASE)
replan = re.compile(r'<div\sclass="textRight">([\s\S]+?)</div>', re.IGNORECASE)

def do_crawl():
    try:
        server_url = 'http://www.sotuan.com/config/linelist.xml'
        
        # TODO
        #html, _ = curl_handle.open(server_url)
        html = urllib2.urlopen(server_url).read()
    
        vacation_info_list = vacation_info_re_pre.findall(html)
        #print vacation_info_list
        for vacation_info in vacation_info_list:
            #print "-----------------------------------------------------"
            vacation_info_dict = dict(xml_node_re.findall(vacation_info))
            if not vacation_info_dict:
                print >>sys.stderr, 'cannot match xml_node_re'
                continue
            
            v = vacation_info_dict
    
            route = Route()
            route.site = '2'
            route.supplier_url = "http://www.sotuan.com/"
            #print 'TotalDays: ' + v.get('TotalDays', '')
            route.days = v.get('TotalDays', '')
            #print 'BiddingPriceCom: ' + v.get('BiddingPriceCom', '') 
            
            #print 'AddDate: ' + v.get('AddDate', '') 
            #print 'State: ' + v.get('State', '') 
            #print 'Type: ' + v.get('Type', '') 
            #print 'Province: ' + v.get('Province', '') 
            #print 'StartDate: ' + v.get('StartDate', '') 
            route.dates = v.get('StartDate', '') 
            #print 'Brand: ' + v.get('Brand', '') 
            route.supplier = v.get('Brand', '') 
            #print 'Hotel: ' + v.get('Hotel', '') 
            #print 'MarketPrice: ' + v.get('MarketPrice', '') 
            route.price = v.get('MarketPrice', '') 
            #print 'Name: ' + v.get('Name', '') 
            route.title = v.get('Name', '')
            #print 'CatalogType: ' + v.get('CatalogType', '') 
            #print 'maxStartTime: ' + v.get('maxStartTime', '') 
            #print 'Url: ' + v.get('Url', '') 
            route.rout_url = v.get('Url', '') 
            #print 'AgencyUrl: ' + v.get('AgencyUrl', '') 
            #print 'Summary: ' + v.get('Summary', '') 
            
            route.type = v.get('Summary', '') 
            #print 'traffic: ' + v.get('traffic', '')
            route.go_t = v.get('traffic', '')
            route.back_t =  v.get('traffic', '')
            #print 'PersonNum: ' + v.get('PersonNum', '') 
            route.person = v.get('PersonNum', '') 
            #print 'LineID: ' + v.get('LineID', '') 
            #print 'Tel: ' + v.get('Tel', '')
            route.telphone = v.get('Tel', '')
            #print 'StartCity: ' + v.get('StartCity', '') 
            route.outcity = v.get('StartCity', '') 
            #print 'Keywords: ' + v.get('Keywords', '') 
            route.sights = v.get('Keywords', '') 
            #print 'minStartTime: ' + v.get('minStartTime', '') 
                
            response=urllib2.urlopen(v.get('Url', ''))
            html=response.read().decode("utf-8").encode("utf-8")
            route.detail = getDetail(html)
            #print 'Detail:' + getDetail(html)
            route.plane = getPlane(html)
            #print 'Plane:' + getPlane(html)
            print >>sys.stderr, 'updating', route.supplier, route.title
            route.updRoute()
    except:
        print traceback.format_exc(sys.exc_info())

def getPlane(html):
    """Build the day-by-day itinerary string from a detail page.

    Each per-day section matched by ``relist`` contributes
    ``#第<n>天# <title><plan>``; sections are concatenated in order.
    Raises AttributeError if a section lacks a title or plan div,
    matching the original behavior.
    """
    sections = relist.findall(html)  # was bound to 'list', shadowing the builtin
    parts = []
    for day_no, section in enumerate(sections, 1):
        title = retitle.search(section).group(1).strip()
        plan = replan.search(section).group(1).strip()
        parts.append("#第" + str(day_no) + "天# " + title + plan)
    # join instead of repeated '+' — linear rather than quadratic growth
    return "".join(parts)

def getDetail(html):
    """Return the stripped trip-description text from the se_Cha_con div.

    Raises AttributeError when the page carries no such div, exactly as
    the original did.
    """
    match = redetail.search(html)
    return match.group(1).strip()

def usage():
    print '''Usage: %s [OPTIONS]...
Crawl hotel informations.

  -d,  --debug               enable pdb debugger

  -h,  --help                display this help and exit
''' % sys.argv[0]


if __name__ == '__main__':
    # Run relative to the script's own directory so the Route import and
    # any files it touches resolve regardless of the caller's cwd.
    script_path = os.path.dirname(os.path.realpath(__file__))
    os.chdir(script_path)
    do_debug = False

    try:
        # -o and -p take a value (trailing ':'); long names mirror the shorts.
        opts, args = getopt.gnu_getopt(sys.argv[1:], 'hdno:p:', ['help', 'debug', 'no-cache', 'only-cache', 'provider'])
    except getopt.GetoptError:
        usage()
        sys.exit()

    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage()
            sys.exit()
        elif opt in ('-d', '--debug'):
            do_debug = True
        elif opt in ('-n', '--no-cache'):
            # NOTE(review): use_cache is assigned but never read in this
            # file — possibly dead or consumed elsewhere; confirm.
            use_cache = False
        elif opt in ('-o', '--only-cache'):
            # Anything other than an explicit no/n/0 enables only-cache.
            # NOTE(review): only_cache is likewise never read here.
            if arg.lower() in ('no', 'n', '0'):
                only_cache = False
            else:
                only_cache = True
        elif opt in ('-p', '--provider'):
            # --provider is accepted but currently ignored.
            pass

    if do_debug:
        # Drop into the debugger just before crawling when -d was given.
        import pdb
        pdb.set_trace()
    do_crawl()



