from yaml import load
import os
import sys
import urllib
import string
import unzip
import re
import pyspatialite.dbapi2 as sqlite3

class Unbuffered:
  """Wrap a stream so that every write is immediately flushed.

  All attributes other than write (writelines, fileno, ...) are
  delegated untouched to the wrapped stream via __getattr__.
  """
  def __init__(self, stream):
    self.stream = stream
  def write(self, data):
    # Forward the payload, then force it out of the buffer right away.
    target = self.stream
    target.write(data)
    target.flush()
  def __getattr__(self, attr):
    # Anything we don't define ourselves comes from the wrapped stream.
    return getattr(self.stream, attr)

# Force line-by-line output so progress messages appear immediately.
sys.stdout = Unbuffered(sys.stdout)

datadir = 'data'
if not os.path.exists(datadir):
  os.makedirs(datadir)

# NOTE(review): yaml.load can execute arbitrary constructors on untrusted
# input; the manifest is assumed to be trusted local configuration.
# The with-statement closes the handle (the original leaked it).
with open('datasets/pdx.yaml') as dsfile:
  datasets = load(dsfile)

conn = sqlite3.connect('test.sqlite')
cursor = conn.cursor()
spatialite_version = ''
TABLE_PREFIX = 'pdx_'

# Sanity check: fail fast if the SpatiaLite extension is unavailable.
try:
  spatialite_version = cursor.execute('SELECT spatialite_version();').fetchall()[0][0]
except Exception:
  # Narrowed from a bare except: so SystemExit/KeyboardInterrupt
  # are not swallowed and re-labelled.
  raise Exception('spatialite version could not be retrieved!')

def table_exists(table):
  """Return True if `table` is present in the database, else False."""
  # PRAGMA table_info yields no rows at all for an unknown table.
  info = cursor.execute("PRAGMA table_info(%s)" % table).fetchall()
  return info != []

def init_db():
  if table_exists('spatial_ref_sys'):
    print "Spatial Database already initialized"
    return
  sqlpath = 'sql' + os.path.sep + 'init_spatialite-2.3.sql'
  if not os.path.exists(sqlpath):
    raise Exception('the geospatial init sql seems to be missing... (%s)' % sqlpath)
  sqlf = file(sqlpath,"r").read()
  conn.isolation_level = 'DEFERRED'  
  cursor.executescript(sqlf)
  conn.commit()
  print "Initialized Spatial Database"

def download_dataset(ds):
  if not os.path.exists(datadir + os.sep + ds):
    os.makedirs(datadir + os.sep + ds)
  localfile = datadir + os.sep + ds + os.sep + datasets[ds]['archive']
  if(not os.path.exists(localfile)):
    print "downloading...",
    urllib.urlretrieve(datasets[ds]['uri'],localfile)

def extract_dataset(ds):
  extract = False
  exdir = datadir + os.sep + ds + os.sep
  archive = exdir + datasets[ds]['archive']
  if not os.path.exists(archive):
    raise Exception('the archive for %s has not been downloaded or is missing.' % ds)
  if not 'files' in datasets[ds]:
    extract = True
  else:
    for i,file in enumerate(datasets[ds]['files']):
      fileplus = file + '.' + datasets[ds]['types'][i] if ('types' in datasets[ds]) else datasets[ds]['files'][i] 
      if not os.path.exists(exdir + fileplus) and not os.path.exists(exdir + file):
        extract = True
        break
  if(extract):
    un = unzip.unzip()
    print "extracting...",
    un.extract(archive,datadir + os.sep + ds + os.sep)
  else:
    print "already extracted...",

def import_csv(table,file,encoding):
  """Load a CSV file into `table` via SpatiaLite's VirtualText driver.

  Stages the CSV as a virtual table, snapshots it into a real table,
  then drops the staging table, all in one deferred transaction.
  """
  conn.isolation_level = 'DEFERRED'
  stage_sql = "CREATE VIRTUAL TABLE tmp_csv_import USING VirtualText('%s',%s, 1, COMMA, DOUBLEQUOTE, ',');" % (file,encoding)
  copy_sql = "CREATE TABLE IF NOT EXISTS %s AS SELECT * FROM tmp_csv_import;" % table
  cursor.execute(stage_sql)
  cursor.execute(copy_sql)
  cursor.execute("DROP TABLE tmp_csv_import;")
  conn.commit()
  conn.isolation_level = None

def import_shp(table,file,encoding,proj):
  """Load a shapefile into `table` via SpatiaLite's VirtualShape driver.

  Stages the shapefile as a virtual table, snapshots it into a real
  table, then drops the staging table, all in one deferred transaction.
  """
  conn.isolation_level = 'DEFERRED'
  stage_sql = "CREATE VIRTUAL TABLE tmp_shp_import USING VirtualShape('%s',%s,%s);" % (file,encoding,proj)
  copy_sql = "CREATE TABLE IF NOT EXISTS %s AS SELECT * FROM tmp_shp_import;" % table
  cursor.execute(stage_sql)
  cursor.execute(copy_sql)
  cursor.execute("DROP TABLE tmp_shp_import;")
  conn.commit()
  conn.isolation_level = None

def import_dataset(ds):
  exdir = datadir + os.sep + ds + os.sep
  for i,file in enumerate(datasets[ds]['files']):
    type = 'none'
    table = 'none'
    encoding = 'CP1252' # TODO: pull from ds list
    proj = '3646' # TODO: pull from ds list
    if 'types' in datasets[ds] and len(datasets[ds]['types']) > i:
      type = datasets[ds]['types'][i]
    elif string.split(file,'.')[-1] == 'shp' or string.split(file,'.')[-1] == 'csv':
      type = string.split(file,'.')[-1]
    else:
      raise Exception('unknown type for file %s in dataset %s' % (file,ds))
    if 'tables' in datasets[ds] and len(datasets[ds]['tables']) > i:
      table = datasets[ds]['tables'][i]
    else:
      table = string.replace(ds,'-','_')
    table = TABLE_PREFIX + table
    if table_exists(table):
      print "table exists...",
      continue
    if type == 'shp':
      import_shp(table,exdir + string.replace(file,'.shp',''),encoding,proj)
    else:
      import_csv(table,exdir + file,encoding)
    print "table created...",

def recover_geometry_columns():
  tables = cursor.execute("select m.name from sqlite_master m LEFT OUTER JOIN geometry_columns g ON g.f_table_name = m.name WHERE g.f_table_name ISNULL AND m.sql LIKE '%%GEOMETRY,%%';").fetchall()
  for table in tables:
    typeraw = cursor.execute("select substr(AsText(Geometry),0,50) FROM %s LIMIT 1;" % table[0]).fetchone()
    type = string.split(typeraw[0],"(")[0]
    print "Recovering Geometry Column for %s of type %s..." % (table[0],type),
    cursor.execute("SELECT RecoverGeometryColumn('%s','Geometry',3646,'%s',2)" % (table[0],type))
    print "done"
  
init_db()

for ds in datasets:
  print "Processing %s..." % ds,
  if not 'files' in datasets[ds]:
    print "'files' not defined, skipping"
    continue
  """Prepopulate the archive key"""
  datasets[ds]['archive'] = datasets[ds]['archive'] if ('archive' in datasets[ds]) else (string.split(datasets[ds]['uri'],'/')[-1])
  """Download the dataset"""
  download_dataset(ds)
  """Extract the dataset"""
  extract_dataset(ds)
  """import the dataset"""
  import_dataset(ds)
  print "done"

recover_geometry_columns()
