import urllib2, re, datetime, csv, pickle, sys, logging
from findDate import findDate, findPeriodic
import xml.etree.ElementTree

logging.basicConfig(level=logging.INFO, filename='eventscan.log', filemode='w')
    
class Website(object):
    tagStripper = re.compile(r'((<[^>]+>)([^<]*))', re.MULTILINE | re.DOTALL)
    def __init__(self, url, what, where):
        self.url = url.strip()
        self.what = what.strip()
        self.where = where.strip()
        self.lastScan = None
        self.dates = []
        self.content = None
        self.readError = None
        try:
            self.what = int(self.what)
        except ValueError:
            pass
        try:
            self.where = int(self.where)
        except ValueError:
            pass
    def __eq__(self, other):
        return (self.url == other.url) and (self.where == other.where) \
               and (self.when == other.when)
    def read(self):
        self.content = urllib2.urlopen(self.url).read()
        self.lastScan = datetime.datetime.now()
    def read(self):        
        request = urllib2.Request(self.url)
        request.add_header('User-Agent', 'GeekEventAggregator/0.3 +http://http://catherine.devlin.googlepages.com/geekEvent/')
        if self.lastScan:
            request.add_header('If-Modified-Since', str(self.lastScan)) #?format
        opener = urllib2.build_opener()
        try:
            # if no response b/c If-Modified-Since, don't replace content
            self.content = opener.open(request).read() # no StripFatalUnicode
            self.lastScan = datetime.datetime.now()
            self.readError = None
        except Exception, e:
            self.readError = str(e)
            print e        
    def chopped(self):
        itms = [itm.groups()[2].strip() for itm in self.tagStripper.finditer(self.content or '')]
        itms = [i.replace('&nbsp;','') for i in itms if i]
        return itms    
    def scan(self):
        self.read()
        strings = self.chopped() 
        logging.error('%s: what = %s, where = %s' % (self.url, str(self.what), str(self.where)))
        self.events = events(strings, self.what, self.where)
        print 'Events found for %s:' % (self.url)
        for e in self.events:
            e.website = self
            print e.name

class Event(object):
    def __init__(self, dt, sourceStrings, indx, 
                 name, place, contextWidth = 5):
        self.dt = dt
        self.source = sourceStrings[indx]        
        self.contextWidth = contextWidth
        self.contextStrings = sourceStrings[max(0,indx-contextWidth):indx]
        self.contextCenter = len(self.contextStrings)
        self.contextStrings.extend(sourceStrings[indx:indx+contextWidth+1])
        self.dateIndex = indx
        self.name = self.relative(name).strip()
        self.place = self.relative(place)
        self.coords = locFinder.get(' '.join(self.place.strip().upper().replace(',',' ').split()))
        logging.info('what:  %s' % self.name)
        logging.info('where: %s ---> %s' % (self.place, str(self.coords)))
        if not self.coords[0]:
            logging.error('loc lookup failed for %s' % (self.place))            
        logging.info('when:  %s ---> %s' % (self.source, str(self.dt)))        
    def csv(self):
        try:
            return '"%s", %s, "%s", "%s", %s, %s, "%s"\n' % (
                self.name.replace('"',"'").encode('US-ASCII','replace'),
                self.dt,
                    self.source.replace('"',"'").encode('US-ASCII','replace'),
                self.place.replace('"',"'").encode('US-ASCII','replace'),
                self.coords[0],self.coords[1],
                self.website.url)
        except:
            logging.error('UnicodeDecodeError')
            logging.error(self.name)
            logging.error(self.source)
            logging.error(self.place)
            return ''
            
    def __str__(self):
        return '%s: %s (%s: %s, %s)' % (str(self.dt), self.name, self.place, self.coords[0], self.coords[1])
    def context(self, indx):
        target = self.contextCenter + indx
        if ( 0 <= target < len(self.contextStrings) ):
            return self.contextStrings[target]
        else:
            raise IndexError
    def relative(self, offset):
        if isinstance(offset, int):
            return self.context(offset)
        else:
            return offset

class Store(object):
    def __init__(self):
        try:
            sourcefile = open(self.picklefile,'rb')
            self.data = pickle.load(sourcefile)
            sourcefile.close()
        except IOError:
            self.data = {}
    def save(self):
        pickle.dump(self.data, open(self.picklefile, 'wb'))
           
class SiteStore(Store):
    picklefile = 'sites.pickle'   
siteStore = SiteStore()
    
class LocFinder(Store):
    picklefile = 'locs.pickle'
    yahooAppid = 'dlk284_V34GGCZmFowvnDzL0RKEa0uPzmy2AG_uS.dw62j8YG.ZIhKi.xBq5j4AIj8IJ6lR3s5SZONZgiCBTqQ--'
    def fromYahoo(self, place):
        place = place.replace(' ','+')
        yahoo = urllib2.urlopen('http://local.yahooapis.com/MapsService/V1/geocode?appid=%s&location=%s' %
                                (self.yahooAppid, place))
        result = xml.etree.ElementTree.parse(yahoo)
        yahoo.close()
        return result
    def get(self, place):
        locEtree = self.data.get(place)
        if not locEtree:
            try:
                locEtree = self.fromYahoo(place)
            except urllib2.HTTPError:
                return (None, None)
            self.data[place] = locEtree
        locNode = locEtree.getroot()[0]
        return (float(locNode.find('{urn:yahoo:maps}Latitude').text),
                float(locNode.find('{urn:yahoo:maps}Longitude').text))       
locFinder = LocFinder()    
    
now = datetime.datetime.utcnow()

def events(itms, what, where):
    '''Searches strings for things that look like dates or periodic dates'''
    result = []
    for (indx, itm) in enumerate(itms):
        dates = findPeriodic(itm) or [findDate(itm)]
        for dt in dates:
            if dt and (dt > now):
                event = Event(dt, itms, indx, what, where)
                result.append(event)
                print str(event)
    return result

def refresh(url, sitesread):
    new = sitesread[url]
    if new != siteStore.data.get(url):
        siteStore.data[url] = new
    siteStore.data[url].scan()
        
def dump():
    outfile = open('events.csv','w')
    for (url, website) in siteStore.data.items():
        for event in website.events:
            if event.coords[0] and event.name:
                outfile.write(event.csv())
    outfile.close()
    
def main():
    sitesread = dict((row[0], Website(*row)) for row in csv.reader(open("websites.txt")))
    
    for url in sitesread.keys():
        refresh(url, sitesread)
    siteStore.save()
    dump()
    print "Don't forget to sort -u."
    
def oneSite(url):
    refresh(url)
    
if __name__ == '__main__':
    main()