#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fileencoding=utf-8

from pyquery import PyQuery as pq
from urllib2 import Request, urlopen
from pyscript.core.url import urlVerify, realUrl, urlGetContents 

class Collect(object):
    """Base class for a simple page scraper.

    Subclasses override :meth:`parseData` to turn the fetched page body
    into structured data; :meth:`fopen` drives the fetch-then-parse cycle
    and :meth:`getData` exposes the result.
    """

    def __init__(self, **kargs):
        """Initialize fetch settings from keyword arguments.

        Recognized keys (all optional):
            header  -- dict of HTTP request headers (default: a desktop
                       Chrome User-Agent).
            timeout -- per-request timeout in seconds (default 30).
            retry   -- number of retries on failure (default 3).
            sleep   -- seconds to wait between retries (default 5).
        """
        # NOTE(review): the kwarg is spelled 'header' (singular) while the
        # attribute is 'headers' -- kept as-is for backward compatibility.
        self.headers = kargs.get('header', {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.52 Safari/537.36'})
        self.data = None  # parsed result, filled in by __run()
        self.body = None  # raw page body, filled in by fopen()
        self.pq = pq      # expose pyquery to subclasses for parsing
        self.timeout = kargs.get('timeout', 30)
        self.retry = kargs.get('retry', 3)
        self.sleep = kargs.get('sleep', 5)

    def fopen(self, url=None):
        """Fetch *url* and, if a body came back, parse it via parseData().

        A falsy *url* (None or empty string) is a no-op.
        """
        if not url:
            return
        self.body = urlGetContents(url, headers=self.headers,
                                   timeout=self.timeout,
                                   sleep=self.sleep, retry=self.retry)
        if self.body:
            self.__run(url)

    def __run(self, url=None):
        # Delegate to the subclass hook and cache whatever it returns.
        self.data = self.parseData(url)

    def parseData(self, baseUrl=None):
        """Subclass hook: parse self.body into data. Base implementation
        returns None."""
        pass

    def getData(self):
        """Return the data produced by the last successful fopen()."""
        return self.data


