#!/usr/bin/env python
# -*- coding: utf-8 -*- 
#############################################################################
# GMangas
# Copyright (C) 2008  Ferraro Luciano (aka Lux) <luciano.ferraro@gmail.com>
# 
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# 
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
# 
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
############################################################################

import os
import sys
import time
import inspect
import thread
import urllib2
from urllib import FancyURLopener
from BeautifulSoup import BeautifulSoup as bs

# Shared state for the threaded download helpers below.
downloads_list = {}    # url -> list of callbacks waiting on an in-flight fetch
downloaded_data = {}   # url -> downloaded page body (cache)
URL = urllib2.build_opener()
# BUG FIX: OpenerDirector's header-list attribute is `addheaders`, not
# `add_headers`; with the old (misspelled) name the custom User-Agent
# was silently never sent with requests.
URL.addheaders = [("User-agent", "Opera/9.25 (Windows NT 5.1; U; en)")]

class Error(Exception):
    """Generic application exception wrapping an arbitrary payload value."""

    def __init__(self, value):
        # Keep the payload around so __str__ can render it later.
        self.value = value

    def __str__(self):
        # Render the payload via repr() so strings keep their quotes.
        return repr(self.value)


def assoc(key, alist):
    """Return the value paired with `key` in association list `alist`.

    Only the first matching pair counts; returns None when no pair matches.
    """
    matches = (v for k, v in alist if k == key)
    return next(matches, None)

def pushnew(val, ll):
    """Move `val` to the end of list `ll` (appending it if absent).

    Mutates `ll` in place and returns None.
    """
    if val in ll:
        ll.remove(val)
    ll.append(val)

def rmassoc(key, alist):
    """Delete the first pair in `alist` whose key equals `key`.

    Mutates `alist` in place; returns None whether or not a pair matched.
    """
    for idx, (k, _v) in enumerate(alist):
        if k == key:
            del alist[idx]
            return None

def download_cback(url, data):
    """Record the fetched body `data` for `url` in the shared page cache."""
    downloaded_data.update({url: data})

def download_page(url):
    """Fetch `url` and return its body as a string, with caching and
    de-duplication of concurrent downloads.

    Three paths:
      * body already cached -> return it immediately;
      * another thread is downloading this url -> register a callback
        and busy-wait until that thread delivers the body;
      * otherwise download synchronously, then fan the body out to any
        callbacks registered while we were reading.

    NOTE(review): the wait loop treats any falsy body (e.g. an empty
    page) as "not ready yet", so an empty download would spin forever;
    also the check-then-act on the shared dicts is not atomic — verify
    behavior under real concurrent use.
    """
    global downloads_list
    # Fast path: the page body is already in the cache.
    if downloaded_data.has_key(url):
        return downloaded_data[url]
    elif downloads_list.has_key(url):
        # A download for this url is in flight: ask it to hand us the
        # body via download_cback, then poll until the body appears.
        downloads_list[url].append(lambda data: download_cback(url, data))
        downloaded_data[url] = None
        while not downloaded_data[url]:
            time.sleep(0.1)
        return downloaded_data[url]
    print "Downloading %s" % url
    # Mark the url as in-flight so concurrent callers queue up above.
    downloads_list[url] = []
    data = URL.open(url).read()
    # Deliver the body to every waiter that registered meanwhile (each
    # callback in its own thread), then clear the in-flight marker.
    if downloads_list.has_key(url):
        for cback in downloads_list[url]:
            thread.start_new(cback, (data,))
        del downloads_list[url]
    return data

def get_path_from_filename(filename):
    """Return the absolute directory (with trailing separator) in which
    `filename` would live next to the *caller's* source file.

    Uses the caller's stack frame to locate its module file; the frame
    lookup must stay directly inside this function, not in a helper.
    """
    caller_file = inspect.getfile(sys._getframe(1))
    candidate = os.path.join(os.path.dirname(caller_file), filename)
    # Strip the filename itself off the absolute path, keeping the
    # trailing separator.
    return os.path.abspath(candidate)[:-len(filename)]

def create_dir(path):
    """Create `path` (including any missing parents).

    Returns None on success and False when os.makedirs raises OSError
    (e.g. the directory already exists or cannot be created).
    """
    try:
        os.makedirs(path)
        return None
    except OSError:
        return False

def prn(stuff, label=""):
    print label,">>>",repr(stuff)
    return stuff
