#!/usr/bin/env python

import os, sys, csv
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
import settings
from gtfs.models import *
from zipfile import ZipFile
from util import compose_meta, meta_append_to_list, meta_name_attributes
import datetime, time, traceback
from django.db import transaction
from django.contrib.gis.geos import Point, LineString

class ImportError(Exception):
    """Fatal problem with the feed being imported.

    NOTE(review): this shadows Python's builtin ImportError inside this
    module, so `except ImportError` here also catches failed imports.
    """
    pass

class ImportWarning(Exception):
    """Non-fatal problem noticed during import.

    NOTE(review): shadows the builtin ImportWarning within this module.
    Nothing in the visible code raises it.
    """
    pass

class ValueLoader(object):
    """Marker base class for column mappers.

    ImportManager.run() discovers the mappers declared on a loader class
    via isinstance(attr, ValueLoader).
    """
    pass

# Registry of loader classes; ImportManager.check_loaders() iterates it.
loaders = []

# Composite metaclass for loader classes.  From the arguments it appears
# to (a) append each new class to `loaders` and (b) stamp every
# ValueLoader class attribute with its attribute name via
# setattr(instance, "name", name) -- TODO confirm against util.py,
# which is not visible here.
mclass = compose_meta((meta_append_to_list, (loaders,)), 
                      (meta_name_attributes, 
                       (lambda name, value: isinstance(value, ValueLoader), 
                        lambda name, instance: setattr(instance, "name", name))))

class Value(ValueLoader):
    """Maps one CSV column to one model attribute.

    Instances are declared as class attributes on loader classes; the
    metaclass stamps each instance with its attribute name (self.name).
    A Value whose `trans` function carries a `tplen` attribute is
    iterable and yields one TupleValue per tuple element, so

        a, b = Value(col, trans=Value.hms_to_int_and_days)

    unpacks into two mappers sharing a single source column.
    """

    def returns_tuple(tplen):
        # Class-body decorator: marks a trans function as returning a
        # tuple of `tplen` elements (consumed by next() below).
        def decorate_func(f):
            f.tplen = tplen
            return f
        return decorate_func

    @classmethod
    def unicode_or_none(cls, v):
        # An empty CSV cell means "no value".
        if v == '':
            return None
        return v

    @classmethod
    def float_or_none(cls, v):
        if v == '':
            return None
        return float(v)

    @classmethod
    def int_or_none(cls, v):
        if v == '':
            return None
        return int(v)

    @classmethod
    def bool(cls, v):
        # GTFS boolean columns use "1" for true; anything else is false.
        return v == '1'

    @classmethod
    def date_or_none(cls, v):
        # Dates arrive as YYYYMMDD.
        if v == '':
            return None
        return datetime.date(*time.strptime(v, "%Y%m%d")[:3])

    @classmethod
    def hms_to_int(cls, v):
        # "H:MM:SS" -> total seconds; hours may exceed 23 in GTFS.
        if v == '':
            return None
        h, m, s = map(int, v.split(':'))
        return h*3600 + m*60 + s

    @classmethod
    @returns_tuple(2)
    def hms_to_int_and_days(cls, v):
        # Split a (possibly >24h) GTFS time into
        # (seconds-within-day, whole-day offset).
        secs = Value.hms_to_int(v)
        if secs is None:
            return None, None
        # divmod replaces the old `days = secs / 86400` floor division,
        # which would silently break under true division.
        days, secs = divmod(secs, 86400)
        return secs, days

    def __init__(self, from_column, **kwargs):
        self.iter_count = 0
        self.from_column = from_column
        # Default translation maps '' to None, leaves text untouched.
        self.trans = kwargs.get('trans') or self.unicode_or_none

    def __iter__(self):
        return self

    def next(self):
        # Only tuple-returning trans functions support unpacking.
        if not self.trans or not hasattr(self.trans, "tplen"):
            raise StopIteration
        if self.iter_count == self.trans.tplen:
            raise StopIteration
        idx = self.iter_count
        self.iter_count += 1
        return TupleValue(self, idx)

    def file(self, name_to_index):
        """Resolve the source column's index from the file header.

        Raises KeyError if the column is absent (OptionalValue relies
        on this).
        """
        self.column_index = name_to_index[self.from_column]

    def __call__(self, row):
        """Extract and translate this mapper's cell from a CSV row."""
        if self.column_index is None:
            raise KeyError(self.from_column)
        return self.trans(row[self.column_index])

class OptionalValue(Value):
    """A Value whose source column may be absent from the file.

    When the column is missing, or the translated value is None, the
    configured `default` (keyword argument, default None) is returned.
    """

    def __init__(self, *args, **kwargs):
        super(OptionalValue, self).__init__(*args, **kwargs)
        self.default = kwargs.get('default', None)

    def file(self, *args, **kwargs):
        try:
            super(OptionalValue, self).file(*args, **kwargs)
        except KeyError:
            # Column not present in this feed; signal via None so that
            # __call__ falls back to the default.
            self.column_index = None

    def __call__(self, *args, **kwargs):
        if self.column_index is None:
            return self.default
        value = super(OptionalValue, self).__call__(*args, **kwargs)
        # Substitute the default only for a genuinely missing value.
        # The previous `value or self.default` also clobbered legitimate
        # falsy values such as 0.
        if value is None:
            return self.default
        return value

class TupleValue(Value):
    """Selects one element of a tuple produced by a parent Value.

    Instances are created by Value.next() when a tuple-returning trans
    function is unpacked across several loader attributes.
    """

    def __init__(self, parent_value, idx):
        self.parent_value = parent_value
        self.idx = idx

    def file(self, *args, **kwargs):
        # Column resolution is entirely the parent mapper's job.
        self.parent_value.file(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        # Evaluate the parent, then pick out our element of its tuple.
        result = self.parent_value(*args, **kwargs)
        return result[self.idx]

class GTFSLoader(object):
    """Base class for all per-file GTFS loaders.

    Subclasses set `filename` and `required`, declare Value /
    OptionalValue column mappers as class attributes, and implement
    row(cols), plus optional postfile() and finish() hooks.
    """
    # presumably consulted by the metaclass machinery to avoid
    # registering the base class itself -- TODO confirm in util.py.
    _ignore = ('GTFSLoader', )
    __metaclass__ = mclass

    def __init__(self, dataset):
        # Work deferred until finish(), after all files have loaded.
        self.defer = []
        self.dataset = dataset

class AgencyLoader(GTFSLoader):
    """Loads agency.txt rows directly into Agency objects."""
    filename = "agency.txt"
    required = True
    __metaclass__ = mclass
    text_id = OptionalValue("agency_id", default="")
    name = Value("agency_name")
    url = Value("agency_url")
    timezone = Value("agency_timezone")
    lang = OptionalValue("agency_lang", default="")
    phone = OptionalValue("agency_phone", default="")

    def row(self, cols):
        # One CSV row maps one-to-one onto an Agency record; no
        # cross-references to resolve, so save immediately.
        Agency(dataset=self.dataset, **cols).save()

class StopLoader(GTFSLoader):
    """Loads stops.txt into Stop objects.

    Parent-station links are deferred to finish(), because a parent row
    may appear after its children in the file.
    """
    object_class = Stop
    filename = "stops.txt"
    required = True
    __metaclass__ = mclass
    text_id = Value("stop_id")
    code = OptionalValue("stop_code", default="")
    name = Value("stop_name")
    desc = OptionalValue("stop_desc", default="")
    lat = Value("stop_lat", trans=Value.float_or_none)
    lon = Value("stop_lon", trans=Value.float_or_none)
    zone = OptionalValue("zone_id")
    url = OptionalValue("stop_url", default="")
    is_station = OptionalValue("location_type")
    parent_station = OptionalValue("parent_station")

    def row(self, cols):
        parent_station_id = cols.pop('parent_station')
        lat = cols.pop('lat')
        lon = cols.pop('lon')
        # NOTE(review): GEOS Point takes (x, y) == (lon, lat); passing
        # (lat, lon) looks transposed -- confirm how the rest of the
        # project reads Stop.point before changing it.
        stop = Stop(dataset=self.dataset,
                    parent_station=None,
                    point=Point(lat, lon),
                    **cols)
        stop.save()
        if parent_station_id is not None:
            # Resolve the parent once every stop has been inserted.
            self.defer.append((parent_station_id, stop.id))

    def finish(self):
        """Wire up deferred parent-station references."""
        for parent_station_id, stop_id in self.defer:
            stop = Stop.objects.get(id=stop_id)
            # Scope the lookup to this dataset: GTFS text ids are only
            # unique within a single feed.
            stop.parent_station = Stop.objects.get(text_id=parent_station_id,
                                                   dataset=self.dataset)
            if not stop.parent_station.is_station:
                raise Exception("parent station of %s is not a station: %s" % (stop, stop.parent_station))
            stop.save()

class RoutesLoader(GTFSLoader):
    """Loads routes.txt into Route objects."""
    object_class = Route
    filename = "routes.txt"
    required = True
    __metaclass__ = mclass
    text_id = Value("route_id")
    agency_id = OptionalValue("agency_id")
    # These were declared as Value(..., default=...), but Value silently
    # ignores the `default` kwarg; OptionalValue is the class that
    # honours it.
    short_name = OptionalValue("route_short_name", default="")
    long_name = OptionalValue("route_long_name", default="")
    desc = OptionalValue("route_desc", default="")
    route_type = Value("route_type", trans=Value.int_or_none)
    url = OptionalValue("route_url", default="")
    colour = OptionalValue("route_color", default="")
    # GTFS uses the US spelling "route_text_color"; the previous
    # "route_text_colour" column name never matched real feeds.
    text_colour = OptionalValue("route_text_color", default="")

    def __init__(self, *args, **kwargs):
        super(RoutesLoader, self).__init__(*args, **kwargs)
        # When the feed has exactly one agency, routes may omit
        # agency_id and fall back to it.
        agencies = self.dataset.agency_set.all()
        if len(agencies) == 1:
            self.default_agency = agencies[0]
        else:
            self.default_agency = None

    def row(self, cols):
        # The mapper always supplies the 'agency_id' key (None when the
        # column is absent or empty), so the old has_key() test was
        # always true and a None id crashed the ORM lookup instead of
        # using the default agency.  Test the value, not the key.
        agency_id = cols.pop('agency_id')
        if agency_id is not None:
            agency = self.dataset.agency_set.get(text_id=agency_id)
        else:
            agency = self.default_agency
        if not agency:
            raise Exception("Unable to find agency object for route")
        route_type = RouteType.objects.get(id=cols.pop('route_type'))
        route = Route(agency=agency, route_type=route_type, **cols)
        route.save()

class CalendarLoader(GTFSLoader):
    """Loads calendar.txt: one Service plus its weekly Calendar row."""
    object_class = Calendar
    filename = "calendar.txt"
    required = True
    __metaclass__ = mclass
    service_id = Value("service_id")
    monday = Value("monday", trans=Value.bool)
    tuesday = Value("tuesday", trans=Value.bool)
    wednesday = Value("wednesday", trans=Value.bool)
    thursday = Value("thursday", trans=Value.bool)
    friday = Value("friday", trans=Value.bool)
    saturday = Value("saturday", trans=Value.bool)
    sunday = Value("sunday", trans=Value.bool)
    start_date = Value("start_date", trans=Value.date_or_none)
    end_date = Value("end_date", trans=Value.date_or_none)

    def row(self, cols):
        # Create the Service record first, then hang its weekly
        # calendar (day flags + date range) off it.
        service = Service(dataset=self.dataset,
                          text_id=cols.pop('service_id'))
        service.save()
        Calendar(service=service, **cols).save()

class CalendarDatesLoader(GTFSLoader):
    """Loads calendar_dates.txt: per-date service exceptions."""
    object_class = CalendarDate
    filename = "calendar_dates.txt"
    required = False
    __metaclass__ = mclass
    service_id = Value("service_id")
    date = Value("date", trans=Value.date_or_none)
    exception_type = Value("exception_type")

    def row(self, cols):
        # exception_type "1" adds service on the date, "2" removes it.
        add_or_remove_service = cols.pop("exception_type") == "1"
        text_id = cols.pop("service_id")
        try:
            service = Service.objects.get(dataset=self.dataset,
                                          text_id=text_id)
        except Service.DoesNotExist:
            # calendar_dates.txt may reference services that have no
            # calendar.txt entry; create such services on demand.
            service = Service(dataset=self.dataset, text_id=text_id)
            service.save()
        CalendarDate(add_or_remove_service=add_or_remove_service,
                     service=service,
                     **cols).save()

class ShapesLoader(GTFSLoader):
    object_class = Shape
    filename = "shapes.txt"
    required = False
    __metaclass__ = mclass
    text_id = Value("shape_id")
    lat = Value("shape_pt_lat", trans=Value.float_or_none)
    lon = Value("shape_pt_lon", trans=Value.float_or_none)
    sequence = Value("shape_pt_sequence", trans=Value.int_or_none)
    dist_travelled = OptionalValue("shape_dist_travelled", trans=Value.float_or_none)

    def __init__(self, *args, **kwargs):
        super(ShapesLoader, self).__init__(*args, **kwargs)
        self.by_textid = {}

    def row(self, cols):
        text_id = cols.pop('text_id')
        if not self.by_textid.has_key(text_id):
            self.by_textid[text_id] = []
        self.by_textid[text_id].append((cols['sequence'],
                                        cols['lat'],
                                        cols['lon'],
                                        cols['dist_travelled']))

    def postfile(self):
        print "adding", len(self.by_textid), "shapes"
        for i, text_id in enumerate(self.by_textid):
            if i % 10 == 0:
                sys.stdout.write(".")
                sys.stdout.flush()
            points = []
            for pt in self.by_textid[text_id]:
                sequence, lat, lon, dist_travelled = pt
                points.append((sequence, Point(lat, lon)))
            points.sort(lambda a, b: cmp(a[0], b[0]))
            shape = Shape(dataset=self.dataset,
                          text_id=text_id,
                          line_string=LineString([t[1] for t in points]))
            shape.save()
        print " done."

class TripsLoader(GTFSLoader):
    object_class = Trip
    filename = "trips.txt"
    required = True
    __metaclass__ = mclass
    route_id = Value("route_id")
    service_id = Value("service_id")
    text_id = Value("trip_id")
    headsign = OptionalValue("trip_headsign", default="")
    short_name = OptionalValue("trip_short_name", default="")
    direction_id = OptionalValue("direction_id")
    block_id = OptionalValue("block_id")
    shape_id = OptionalValue("shape_id")

    def row(self, cols):
        route_id = cols.pop("route_id")
        route = Route.objects.get(agency__dataset=self.dataset,
                                  text_id=route_id)
        service_id = cols.pop("service_id")
        service = Service.objects.get(text_id=service_id,
                                      dataset=self.dataset)
        inbound = outbound = False
        if cols.has_key("direction_id"):
            direction_id = cols.pop("direction_id")
            if direction_id == "0":
                outbound = True
            elif direction_id == "1":
                inbound = True
        block = None
        if cols.has_key('block_id'):
            block_id = cols.pop('block_id')
            if block_id is not None:
                try:
                    block = Block.objects.get(text_id=block_id)
                except Block.DoesNotExist:
                    print "add block", block_id
                    block = Block(dataset=self.dataset,
                                  text_id=block_id)
                    block.save()
        shape = None
        if cols.has_key('shape_id'):
            shape_id = cols.pop('shape_id')
            if shape_id is not None:
                    shape = Shape.objects.get(text_id=shape_id)
        trip = Trip(route=route,
                    service=service,
                    inbound=inbound,
                    outbound=outbound,
                    block=block,
                    shape=shape,
                    **cols)
        trip.save()

class StopTimeLoader(GTFSLoader):
    """Loads stop_times.txt into StopTime objects.

    Arrival/departure times unpack into (seconds-within-day, day
    offset) pairs to cope with GTFS times past 24:00:00.
    """
    object_class = StopTime
    filename = "stop_times.txt"
    required = True
    __metaclass__ = mclass
    trip_id = Value("trip_id")
    arrival_time, arrival_days = Value("arrival_time", trans=Value.hms_to_int_and_days)
    departure_time, departure_days = Value("departure_time", trans=Value.hms_to_int_and_days)
    stop_id = Value("stop_id")
    sequence = Value("stop_sequence", trans=Value.int_or_none)
    headsign = OptionalValue("stop_headsign", default="")
    pickup_type_id = OptionalValue("pickup_type", trans=Value.int_or_none)
    drop_off_type_id = OptionalValue("drop_off_type", trans=Value.int_or_none)
    # GTFS spells this column "shape_dist_traveled" (one 'l'); the
    # previous "shape_dist_travelled" never matched real feeds.
    shape_dist_travelled = OptionalValue("shape_dist_traveled", trans=Value.float_or_none)

    def row(self, cols):
        trip = Trip.objects.get(text_id=cols.pop("trip_id"),
                                service__dataset=self.dataset)
        stop = Stop.objects.get(text_id=cols.pop("stop_id"),
                                dataset=self.dataset)
        # A missing pickup/drop-off type defaults to id 0 ("regular").
        # (The dead `pickup_type = drop_off_type = None` initialisers
        # were removed; both are assigned unconditionally below.)
        pickup_type = Arrangements.objects.get(
            id=cols.pop("pickup_type_id") or 0)
        drop_off_type = Arrangements.objects.get(
            id=cols.pop("drop_off_type_id") or 0)
        stop_time = StopTime(trip=trip,
                             stop=stop,
                             pickup_type=pickup_type,
                             drop_off_type=drop_off_type,
                             **cols)
        stop_time.save()

class FrequencyLoader(GTFSLoader):
    """Loads frequencies.txt into Frequency objects."""
    object_class = Frequency
    filename = "frequencies.txt"
    required = False
    __metaclass__ = mclass
    trip_id = Value("trip_id")
    start_time, start_time_days = Value("start_time", trans=Value.hms_to_int_and_days)
    end_time, end_time_days = Value("end_time", trans=Value.hms_to_int_and_days)
    headway_secs = Value("headway_secs", trans=Value.int_or_none)

    def row(self, cols):
        # Each frequency row references its template trip by GTFS id.
        trip = Trip.objects.get(text_id=cols.pop("trip_id"),
                                service__dataset=self.dataset)
        Frequency(trip=trip, **cols).save()

class TransfersLoader(GTFSLoader):
    """Loads transfers.txt into Transfers objects."""
    object_class = Transfers
    filename = "transfers.txt"
    required = False
    __metaclass__ = mclass
    from_stop_id = Value("from_stop_id")
    to_stop_id = Value("to_stop_id")
    transfer_type_id = Value("transfer_type")
    min_transfer_time = OptionalValue("min_transfer_time", trans=Value.int_or_none)

    def row(self, cols):
        from_stop = Stop.objects.get(text_id=cols.pop("from_stop_id"),
                                     dataset=self.dataset)
        to_stop = Stop.objects.get(text_id=cols.pop("to_stop_id"),
                                   dataset=self.dataset)
        # Previously transfer_type_id was never popped out of cols
        # (a NameError at runtime, plus a stray kwarg to Transfers) and
        # the lookup bypassed the model manager; both are fixed here.
        transfer_type = TransferType.objects.get(
            id=cols.pop("transfer_type_id"))
        transfers = Transfers(from_stop=from_stop,
                              to_stop=to_stop,
                              transfer_type=transfer_type,
                              **cols)
        transfers.save()

class FareAttributesLoader(GTFSLoader):
    """Stub: fare_attributes.txt is recognised in the archive, but with
    no column mappers or row() defined, ImportManager.run() returns
    before processing any of its rows."""
    filename = "fare_attributes.txt"
    required = False
    __metaclass__ = mclass

class FareRulesLoader(GTFSLoader):
    """Stub: fare_rules.txt is recognised in the archive, but with no
    column mappers or row() defined, ImportManager.run() returns before
    processing any of its rows."""
    object_class = FareRule
    filename = "fare_rules.txt"
    required = False
    __metaclass__ = mclass

class ImportManager(object):
    def __init__(self, source, file_path, dry_run=None):
        self.zf = ZipFile(file_path)
        active_loaders = self.check_loaders()

        class DRE(Exception):
            pass

        @transaction.commit_on_success
        def run():
            try:
                self.dataset = Dataset(source=source, file_path=file_path, imported=datetime.datetime.now())
                self.dataset.save()
                instances = dict(map(lambda l: (l, l(self.dataset)),
                                     active_loaders))
                for loader in active_loaders:
                    self.run(instances[loader])
                    self.postfile(instances[loader])
                for loader in active_loaders:
                    self.finish(instances[loader])
            except:
                print "Exception in run()..."
                traceback.print_exc()
                raise
            if dry_run:
                raise DRE()

        try:
            run()
        except DRE:
            print "Dry run import completed sucessfully."

    def warning(self, s):
        print >>sys.stderr, "warning:", s
    
    def error(self, s):
        print >>sys.stderr, "error:", s
        raise ImportError(s)

    def check_loaders(self):
        active = []
        for loader in loaders:
            try:
                info = self.zf.getinfo(loader.filename)
                active.append(loader)
            except KeyError:
                if loader.required:
                    self.error("required file '%s' not present in archive." % (loader.filename))
                else:
                    self.warning("file '%s' not present in archive." % (loader.filename))
        return active

    def run(self, loader):
        def reader():
            for i in csv.reader(self.zf.read(loader.filename).splitlines()):
                yield [unicode(t.strip()) for t in i]

        loader_title = type(loader).__name__.rsplit('.', 1)[-1] + ": "
        print loader_title, 
        lines = reader()
        header = lines.next()

        # build a hash table to enable swift column name -> row index
        name_to_index = {}
        for i, name in enumerate(header):
            name_to_index[name] = i

        mappers = filter(lambda x: isinstance(x, ValueLoader), 
                         map(lambda x: getattr(loader, x), 
                             dir(loader)))

        if not mappers:
            return

        if not hasattr(loader, "row"):
            return

        for mapper in mappers:
            mapper.file(name_to_index)

        for i, row in enumerate(lines):
            value_dict = dict(((mapper.name, mapper(row)) for mapper in mappers))
            if i % 1000 == 0:
                sys.stdout.write(".")
                sys.stdout.flush()
            loader.row(value_dict)
        print " (%d rows) done." % (i+1)

    def postfile(self, loader):
        if hasattr(loader, "postfile"):
            print "postfile..", loader
            loader.postfile()

    def finish(self, loader):
        if hasattr(loader, "finish"):
            print "finishing..", loader
            loader.finish()

if __name__ == '__main__':
    # Usage: <script> source file_path [dry_run]
    # (arguments map onto ImportManager.__init__'s parameters)
    im = ImportManager(*sys.argv[1:])

