#!/usr/bin/python2.6
import logging
import logging.handlers
import traceback
import web
import sys
import httplib
import urllib
import base64

#logpath = 'log.txt'
#twitterAPI = "http://twitter.com/"
#rootLogger = logging.getLogger('')
#rootLogger.setLevel(logging.DEBUG)
#formatter = logging.Formatter(fmt='%(asctime)s %(levelname)-8s : %(pathname)s (%(lineno)d) --- %(message)s', datefmt='%d %b %Y %H:%M:%S')
#fileHandler = logging.FileHandler(logpath)
#fileHandler.setFormatter(formatter)
#rootLogger.addHandler(fileHandler)

class BaseProxy(object):
  """web.py request handler that transparently proxies /api/* calls to
  twitter.com, forwarding a whitelist of client headers and relaying
  Twitter's status, headers and body back to the caller."""

  # Only these request headers are forwarded to Twitter.
  required_header = ['Authorization',
                     'User-Agent',
                     'X-Twitter-Client',
                     'X-Twitter-Client-URL',
                     'X-Twitter-Client-Version']
  proxyaddress = ''

  def __init__(self):
    self.proxyaddress = 'twitter.com'
    # For form POSTs, have web.py read and cache the request body now so
    # it is still available via web.data() when the request is relayed.
    if self._is_post_request():
      web.ctx.data = web.webapi.data()
    self.callback_specified = False

  def _is_post_request(self):
    """Return True when this request is a POST carrying a form body."""
    if web.ctx.env['REQUEST_METHOD'].upper() != 'POST':
      return False
    content_type = web.ctx.env.get('CONTENT_TYPE', 'application/x-www-form-urlencoded')
    # BUG FIX: the original nested the `or` inside startswith(), so the
    # truthy string literal short-circuited it and the multipart/form-data
    # case was never actually checked.
    return (content_type.startswith('application/x-www-form-urlencoded')
            or content_type.startswith('multipart/form-data'))

  def _get_headers(self):
    """Collect the forwardable request headers present in the WSGI environ."""
    headers = {}
    for header in self.required_header:
      # WSGI exposes request headers as HTTP_<NAME> with dashes mapped
      # to underscores.
      header_key = 'HTTP_' + header.replace('-', '_').upper()
      if header_key in web.ctx.environ:
        headers[header] = web.ctx.environ[header_key]
    return headers

  def sendoutput(self, result):
    """Relay Twitter's response to our client.

    result: an httplib.HTTPResponse from the proxied request.
    Returns the response body for web.py to emit (None for an empty
    200 body, matching the original behaviour).
    """
    content = result.read()
    web.ctx.headers = result.getheaders()
    if result.status == 200:
      if len(content.strip()) > 0:
        # Re-declare the length explicitly; header values are strings.
        web.header('content-length', str(len(content)))
        return content
    else:
      web.ctx.status = str(result.status) + ' ' + result.reason
      return content

  def GET(self, params):
    """Proxy GET /api/<params> (plus any query string) to Twitter."""
    twitter_response = None
    headers = self._get_headers()
    target_url = '/' + params
    if web.ctx.environ.get('QUERY_STRING', None):
      target_url += '?' + web.ctx.environ['QUERY_STRING']
    webin = web.input()
    if 'callback' in webin:
      self.callback_specified = webin.callback
    httpcon = httplib.HTTPConnection(self.proxyaddress, 80)
    try:
      httpcon.request('GET', target_url, headers=headers)
      twitter_response = httpcon.getresponse()
      return self.sendoutput(twitter_response)
    except Exception as inst:
      self._log_failure(target_url, inst, headers, twitter_response)
      # NOTE(review): result of web.internalerror() is neither raised nor
      # returned, as in the original — confirm the deployed web.py version
      # sets the status as a side effect.
      web.internalerror()

  def POST(self, params):
    """Proxy POST /api/<params>, forwarding the cached request body."""
    twitter_response = None
    target_url = '/' + params
    headers = self._get_headers()
    httpcon = httplib.HTTPConnection(self.proxyaddress, 80)
    try:
      httpcon.request('POST', target_url, headers=headers, body=web.data())
      twitter_response = httpcon.getresponse()
      return self.sendoutput(twitter_response)
    except Exception as inst:
      self._log_failure(target_url, inst, headers, twitter_response)
      web.internalerror()

  def _log_failure(self, target_url, inst, headers, twitter_response):
    """Log a failed proxy attempt, including Twitter's reply if we got one.

    BUG FIX: the original guarded on a `result` local that was initialized
    to None and never assigned, so the branch that logged Twitter's
    response body was dead code (and referenced a possibly-unbound name).
    """
    if twitter_response is not None:
      logging.error("%s \n\n %s \n\n %s \n\n %s \n\n %s" %
                    (target_url, str(inst), headers, web.data(),
                     twitter_response.read()))
    else:
      logging.error("%s \n\n %s \n\n %s \n\n %s" %
                    (target_url, str(inst), headers, web.data()))

# URL routing table for web.py: everything under /api/ is captured and
# dispatched to the BaseProxy handler class.
urls  = (

    '/api/(.*)', 'BaseProxy',
    )

def runfcgi(func, addr=('localhost', 8000)):
    """Serve the WSGI application *func* via flup's WSGIServer.

    NOTE(review): *addr* is accepted for interface compatibility but is
    ignored — flup's cgi-flavoured WSGIServer is constructed without a
    bind address here; confirm this matches the intended deployment.
    """
    from flup.server import cgi as flups
    server = flups.WSGIServer(func)
    return server.run()
  
# Replace web.py's default FastCGI runner with ours so app.run() uses it.
web.wsgi.runfcgi = runfcgi

#web.webapi.internalerror = web.debugerror
if __name__ == "__main__":
    # Build the web.py application from the routing table and start serving.
    app = web.application(urls, globals())
    app.run()
