#!/usr/bin/env python2.5
# -*- coding: UTF-8 -*-  
# 
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.api import urlfetch 
import logging 

import re
from  milonpy import NMscrap
BeautifulSoup=NMscrap.BeautifulSoup


def goFetch(url, method="GET", headers=None):
    # make it async http://code.google.com/appengine/docs/python/urlfetch/asynchronousrequests.html
    """Fetch *url* via an asynchronous urlfetch RPC and return the result.

    Args:
        url: absolute URL to fetch.
        method: HTTP method string, defaults to "GET".
        headers: optional dict of extra request headers. (Default is None
            rather than a shared mutable {} — the classic mutable-default bug.)

    Returns:
        The urlfetch result object (has .content, .status_code, .headers).

    Raises:
        urlfetch.DownloadError: re-raised after logging when the fetch fails.
            The original code read result.status_code in this branch, but
            `result` is never bound when get_result() raises, so it died with
            an UnboundLocalError that masked the real error; same for the
            `return result` that followed.
    """
    if headers is None:
        headers = {}
    # deadline=10 gives the remote site up to 10s before urlfetch gives up.
    rpc = urlfetch.create_rpc(deadline=10)
    urlfetch.make_fetch_call(rpc, url, payload=None, method=method,
                             headers=headers, allow_truncated=False,
                             follow_redirects=False)
    try:
        result = rpc.get_result()
        if result.status_code == 200:
            logging.info ("goFetch sucesfful"+str(result.status_code))              # ...
        return result
    except urlfetch.DownloadError:
        # No response object exists here — log the URL instead of the
        # (unbound) result, and let the caller see the original exception.
        logging.error("goFetch failed: DownloadError fetching " + url)
        raise


class HanlerScrap(webapp.RequestHandler):
    """Request handler that proxies the HNMS (Greek met service) home page.

    Fetches the page server-side, strips one table cell matched by a regex,
    and echoes the remaining HTML back to the client.
    """

    def get(self, artg):
        """Handle GET /scrap...; *artg* is the captured path suffix (unused)."""
        fetched = goFetch("http://www.hnms.gr/hnms/greek/index_html")
        page = fetched.content
        # Drop the first <td>...</td> cell containing the "project team"
        # (Ομάδα έργου) marker before echoing the page through.
        page = re.sub("<td>.*?Ομάδα έργου.*?</td>", "", page)
        self.response.out.write(page)
        

def main():
    """Build the WSGI application and hand it to the App Engine runtime."""
    routes = [('/scrap(.*?)', HanlerScrap)]
    application = webapp.WSGIApplication(routes, debug=True)
    util.run_wsgi_app(application)


if __name__ == '__main__':
    main()
