'''
Created on 03/12/2012

@author: uqtwang5
'''

import sys
import os
import urllib
import json
import threading
import time
from hashlib import  md5
from math import ceil
import simplejson


# Shared decoder instance, used to validate raw responses before accepting them.
jdecoder = simplejson.JSONDecoder();

files_per_page = 100  # page size requested from the Panoramio API per query
dataset = "full"      # Panoramio photo set selector passed as "set=" parameter
size = "original"     # requested image size passed as "size=" parameter

# Bounding box of the region to crawl, in lon/lat degrees.
# NOTE(review): labelled "New York" but the box spans well beyond the city --
# confirm the intended coverage area.
# New York
minx = -79.749756
miny = 41.264044
maxx = -73.366699
maxy = 44.264044

total_queries = 0     # running count of API requests issued
response_hashes = []  # md5 digests of responses seen so far (duplicate detection)

class QueryThread(threading.Thread):
    """Fetch one page of Panoramio results on a background thread.

    Callers assign ``self.arg`` an 8-tuple
    (dataset, from_idx, to_idx, minx, miny, maxx, maxy, size)
    before ``start()``; on success the raw JSON line is left in
    ``self.response``.
    """

    # Single-line URL template. The original embedded a literal newline and
    # indentation spaces inside the URL, put spaces around "order=", used
    # "$f" instead of "%f" (leaving 7 specifiers for 8 arguments, so the
    # %-formatting raised TypeError), and had a stray trailing space.
    QUERY_TEMPLATE = ('http://www.panoramio.com/map/get_panoramas.php?'
                      'order=upload_date&set=%s&from=%d&to=%d'
                      '&minx=%f&miny=%f&maxx=%f&maxy=%f&size=%s')

    def __init__(self):
        threading.Thread.__init__(self)
        self.response = 0  # raw JSON string once run() succeeds
        self.arg = ()      # query parameters; set by the caller before start()

    def _build_query(self):
        # Separated from run() so URL construction is testable without
        # touching the network.
        return self.QUERY_TEMPLATE % self.arg

    def run(self):
        query = self._build_query()
        done = False
        # Retry until the response parses as JSON; the API sometimes
        # returns partial/invalid payloads.
        while not done:
            try:
                self.response = urllib.urlopen(query).readline()
                jdecoder.decode(self.response)  # validate before accepting
                done = True
            except simplejson.decoder.JSONDecodeError:
                print('simplejson.decoder.JSONDecodeError')
                time.sleep(10)
                
def json_load(str):
    """Decode a JSON string, falling back to an empty result set.

    ``str`` is the raw JSON text (parameter name kept for backward
    compatibility even though it shadows the builtin).

    Returns the decoded object; on a decode failure, returns
    ``{"count": 0, "photos": []}`` so callers can proceed without photos.
    """
    try:
        # Bug fix: json.load() expects a file-like object, but callers pass
        # the string returned by readline() -- use json.loads() instead.
        response_json = json.loads(str)
    except ValueError:  # JSON decode errors are ValueError subclasses
        print("JSON exception! response was:")
        print(str)
        response_json = {}
        response_json["count"] = 0
        response_json["photos"] = []
    return response_json


def get_rect(minx, miny, maxx, maxy):
    # Fetch photo metadata for one bounding box (lon/lat degrees) from the
    # Panoramio API, skipping regions whose response was already seen.
    #
    # NOTE(review): the body appears truncated in this view -- only the
    # first page is fetched and the accumulated ``photos`` list is never
    # returned on the success path. Confirm against the full file.
    global total_queries
    global dataset
    global size
    global files_per_page
                
    from_idx = 0
    to_idx = files_per_page
    has_more = True
    photos = []
    page = 1
    
    # NOTE(review): this template looks broken -- it embeds a literal
    # newline and indentation spaces inside the URL, "$set" should be
    # "&set", and "to=$d" should be "to=%d". With only 7 format
    # specifiers for 8 arguments the %-formatting below would raise
    # TypeError ("not all arguments converted").
    query = '''http://www.panoramio.com/map/get_panoramas.php?
    order=upload_date$set=%s&from=%d&to=$d&minx=%f&miny=%f&maxx=%f&maxy=%f&size=%s''' % (dataset,
    from_idx, to_idx, minx, miny, maxx, maxy, size)
    
    flag = False
    
    # Retry until the response parses as JSON; the API sometimes returns
    # partial/invalid payloads.
    while flag == False:
        try:
            response = urllib.urlopen(query).readline()
            print 'before decoding:', response
            jdecoder.decode(response)
            print 'after decoding:', response
            flag = True
        except simplejson.decoder.JSONDecodeError:
            print 'simplejson.decoder.JSONDecodeError'
            time.sleep(10)
            
    response_json = json_load(response)
    
    total_queries +=1
    
    # De-duplicate: hash only the first line of the response (readline()
    # above), so identical first pages are treated as already-crawled.
    response_hash = md5(response).digest()
    if response_hash in response_hashes:
        print "Seen the begining of this response before. Skipping the rest." # We're bad listeners :-)
        return []
    else:
        response_hashes.append(response_hash)
        
    total_files = response_json["count"]
    # appends_new_photos is defined elsewhere in the file (not visible here);
    # presumably it merges only unseen photos and returns the count added.
    photos, n_new = appends_new_photos(photos, response_json["photos"])
        
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
    
        