# coding: utf-8

# url = "http://touch.qunar.com/h5/train/trainList?station=%E5%8C%97%E4%BA%AC&searchType=sta&sort=3&date=2014-04-12"

# url = '''http://touch.qunar.com/h5/train/trainList?startStation=%E5%8C%97%E4%BA%AC&endStation=%E4%B8%8A%E6%B5%B7&searchType=stasta&date=2014-04-09&sort=3&pageNum=page_var&tpl=train.trainListTpl'''

# query lines of Beijing station
url = "http://touch.qunar.com/h5/train/trainList?station=%E5%8C%97%E4%BA%AC&searchType=sta&pageNum=page_var&tpl=train.trainListTpl"
# Base URL up to and including "/train/": find() gives the index of the
# substring, +7 is len("/train/") so the slice keeps the trailing slash.
train_url = url[:url.find("/train/")+7]


# http://touch.qunar.com/h5/train/trainStaChoose?trainNum=k15
# Detail page for a single train; %s is the train number (e.g. "k15").
search_train_line_via_num_url = "http://touch.qunar.com/h5/train/trainDetail?trainNum=%s"
# Listing page for a station; %s is the UTF-8 station name, %d the page number.
search_train_line_via_station_url = "http://touch.qunar.com/h5/train/trainList?station=%s&searchType=sta&pageNum=%d&tpl=train.trainListTpl" # need station and page_num params

import urllib, time, os
from BeautifulSoup import BeautifulSoup
import threading
# import pickle
import cPickle as pickle


def get_train_line_via_num(train_num):
    """Fetch the detail page for one train and return its list of stops.

    Each stop is a (station_name, arrive_time, departure_time) tuple; the
    times are time.struct_time values parsed with the "%H:%M" format.
    The origin row carries u'起点站' in the arrival column, so its arrival
    is taken from the departure cell; the terminus row carries u'终到站'
    in the departure column, so its departure is taken from the arrival cell.
    """
    page = BeautifulSoup(urllib.urlopen(search_train_line_via_num_url % train_num))
    rows = page.find("div", {"class": "stationList"}).findAll("tr")[1:]  # skip header row
    line = []
    for row in rows:
        cells = row.findAll("td")[1:]
        name = cells[0].span.text
        arrive_text = cells[1].text
        depart_text = cells[2].text
        # Origin has no arrival of its own; reuse the departure time.
        arrive = time.strptime(depart_text if arrive_text == u'起点站' else arrive_text, "%H:%M")
        # Terminus has no departure of its own; reuse the arrival time.
        depart = time.strptime(arrive_text if depart_text == u'终到站' else depart_text, "%H:%M")
        line.append((name, arrive, depart))
    return line


# return the list of lines
def get_train_lines_via_station(station):
    """Page through the listing for *station* and collect train numbers.

    Requests successive result pages (station name UTF-8 encoded into the
    query URL) until an empty page comes back, pulling the train number
    out of each <li> entry on the way.
    """
    numbers = []
    empty_page = BeautifulSoup("")  # sentinel: what an exhausted page parses to
    page_num = 0
    while True:
        page_num += 1
        page = BeautifulSoup(urllib.urlopen(
            search_train_line_via_station_url % (station.encode('utf-8'), page_num)))
        if page == empty_page:
            break
        for item in page.findAll("li"):
            numbers.append(item.a.p.span.next)

    return numbers



save_session_flag = False

class GetInput(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)

    def run(self):
        global save_session_flag
        while True:
            str = ''
            while str == '':
                str = raw_input("> ")
            if str == "save":
                print "save current session until current scanning finished, waiting please ..."
                save_session_flag = True
                break

# Snapshot file for resumable crawling; holds (train_lines, finish_stations, i).
intermediate_file = "train_intermediate.dat"
if os.path.exists(intermediate_file):
    print "restore data from %s ..." % intermediate_file
    # train_lines: list of (train_num, line) tuples; finish_stations: stations
    # already expanded; i: index of the next line to scan in the main loop.
    train_lines, finish_stations, i = pickle.load(open(intermediate_file, "rb"))
else:
    train_lines = []
    finish_stations = []
    i = 0

# get cmd from terminal
# Start the stdin watcher so the user can type "save" at any point.
getinput = GetInput()
getinput.start()

    
    
def add_train_line_from_station(station):
    global train_lines, finish_stations
    if station in finish_stations:
        return
    finish_stations.append(station)
    train_nums = get_train_lines_via_station(station)
    for num in train_nums:
        # if num not in train_lines:
        if num not in [line[0] for line in train_lines]:
            train_line = get_train_line_via_num(num)
            train_lines.append( (num, train_line) )
            print "add train line from %s : %s (%s->%s)" % (station,num, train_line[0][0], train_line[-1][0])

            
# Seed the crawl with one station; every line through it gets scanned,
# and each of those lines' stations is expanded in turn below.
add_train_line_from_station(u'东营') # the start station

# Breadth-first expansion: train_lines grows while we iterate, so index
# with i (restored from the snapshot, if any) rather than a for-loop
# over the mutating list.
while i < len(train_lines):
    trainline = train_lines[i]
    print "### scan train line : ", trainline[0]
    for station_info in trainline[1]:
        add_train_line_from_station(station_info[0]) # station may already finished
    i += 1
    
    if save_session_flag:       # save intermediate data
        print "now save session : %d@%d lines  ..." % (i, len(train_lines))
        pickle.dump( (train_lines, finish_stations, i),
                     open(intermediate_file, "wb"), True) # save in compressed format
        exit()

            


# for trainline in train_lines:        # train_lines is increasing when iteration 
#     print "scan train line : ", trainline[0]
#     for station_info in trainline[1]:
#         if station_info not in cities:
#             add_train_line_from_station(station_info[0]) # station may already finished

                
# train_line = get_train_line_via_num("k15")
# for station_info in train_line:
#     print station_info[0], time.strftime("%H:%M", station_info[1]), time.strftime("%H:%M", station_info[2])

print "all train line finished, now save data ..."
trains = {}
for i in train_lines:
    trains[i[0]] = i[1]
    
pickle.dump(trains, open("trainlines.lst", "wb"), True)


