#!/bin/sh
#Builds the processdb PostGIS database from the base dump, loads the raw
#data files, runs the processing SQL, and exports/backs up the results.
#Requires: createdb/psql/pg_dump on PATH, a postgres superuser on localhost.

#Abort on the first failing command or use of an unset variable: every later
#step depends on the previous one having succeeded.
set -eu

createdb -hlocalhost -Upostgres processdb

#Prepare the DB: unpack the base dump and restore it into the new database
gunzip base_processdb_dump.sql.gz
psql -hlocalhost -dprocessdb -Upostgres < base_processdb_dump.sql

#Load the polygon shapefile into public.polygon.
#You might need to adjust also here the name of the shapefile.
# -i int4 for integers, -I create GiST spatial index, -W LATIN1 input
# encoding, -c create a new table, -s 4326 SRID (WGS84 lat/lon)
shp2pgsql -i -I -WLATIN1 -c -s4326 data/polygons.shp public.polygon | psql -hlocalhost -dprocessdb -Upostgres

#Bulk-load the raw dump files into their tables with client-side \COPY.
#(Paths must not carry a trailing slash — they are plain files, not dirs.)
psql -hlocalhost -dprocessdb -Upostgres -c"\COPY data_provider from 'data/data_provider.txt'"
psql -hlocalhost -dprocessdb -Upostgres -c"\COPY data_resource from 'data/data_resource.txt'"
psql -hlocalhost -dprocessdb -Upostgres -c"\COPY classification from 'data/taxonomy.txt'"
psql -hlocalhost -dprocessdb -Upostgres -c"\COPY occurrence from 'data/occurrence.txt'"

#Run the first batch of processing SQL against the loaded data
psql -hlocalhost -dprocessdb -Upostgres -f processData1.sql


#Join the sites with the polygon table to see which sites fall inside the
#polygons.  The join is partitioned into 500000-wide ranges of site.id and
#each range runs as a backgrounded psql session so the batches execute in
#parallel.  Every batch runs the identical spatial predicate; only the id
#range clause ($1) differs.
site_polygon_batch() {
  psql -hlocalhost -dprocessdb -Upostgres -c"insert into site_polygon(site_id,polygon_id) select s.id as site_id, p.id as polygon_id from site as s, polygon as p where s.the_geom && p.geom_simplified and distance(s.the_geom, p.geom_simplified) < 0.001 and $1;" &
}

lower=0
while [ "$lower" -lt 4000000 ]; do
  upper=$((lower + 500000))
  site_polygon_batch "s.id>$lower and s.id<=$upper"
  lower=$upper
done
#Final batch is open-ended so ids above 4000000 are covered too
site_polygon_batch "s.id>4000000"

#We wait for the processes to finish before continuing
wait

#Run the second batch of processing SQL (uses the site_polygon join results)
psql -hlocalhost -dprocessdb -Upostgres -f processData2.sql

#Export the required data for GBIF
psql -hlocalhost -dprocessdb -Upostgres -c"\COPY occurrence_polygon to 'data/occurrence_polygon.csv'"

#Build a temporary geo_region summary table: one row per polygon that has
#occurrences (num_occ>0), carrying its counts plus the bounding box of its
#geometry.  (xmin/ymin/Envelope/distance are PostGIS 1.x function names.)
psql -hlocalhost -dprocessdb -Upostgres -c"create table geo_region as select id, name as name,1 as region_type,country_iso_code as country_iso_code,num_species as species_count,num_occ as occurrence_count, (select count(*) from site_polygon where polygon_id=s.id) as occurrence_coordinate_count, ymin(Envelope(the_geom)) as min_latitude,ymax(Envelope(the_geom)) as max_latitude, xmin(Envelope(the_geom)) as min_longitude,xmax(Envelope(the_geom)) as max_longitude from polygon as s where num_occ>0;"

psql -hlocalhost -dprocessdb -Upostgres -c"\COPY geo_region to 'data/geo_region.csv'"
#geo_region only exists for the export above, so drop it again
psql -hlocalhost -dprocessdb -Upostgres -c"DROP TABLE geo_region;"

#Compress both exports for transfer
gzip data/geo_region.csv data/occurrence_polygon.csv
echo "Files geo_region.csv.gz and occurrence_polygon.csv.gz prepared to be sent to GBIF."
#Optional upload step, currently disabled.
#NOTE(review): the .gz files are written under data/, but these paths have
#no data/ prefix — verify before enabling the upload.
#s3cmd put occurrence_polygon.csv.gz s3://biodiversityatlas.com/ec2/occurrence_polygon.csv.gz
#s3cmd put geo_region.csv.gz s3://biodiversityatlas.com/ec2/geo_region.csv.gz

#Backup the created database to finish.  The dump file is named
#processdbresult.sql(.gz) — the messages below must match that name.
pg_dump -Upostgres -hlocalhost processdb > processdbresult.sql
gzip processdbresult.sql
#Optional upload step, currently disabled
#s3cmd put processdbresult.sql.gz s3://biodiversityatlas.com/ec2/processdbresult.sql.gz
echo "The processdb database is backed up and ready at processdbresult.sql.gz"
echo ""
echo "FINISHED!"

exit 0