#!/usr/bin/env python

import sys
import subprocess

class URLFetcher:
  """Mirrors every URL of the session's source by invoking wget recursively.

  The wget binary path, data directory and User-Agent come from the session's
  config; domain restrictions, extension exclusions, the inter-request wait
  and the URL list come from the session's source.
  """

  def __init__(self, session):
    self._session = session

  def fetch(self):
    """Run wget once per source URL.

    Returns:
      list of int: the wget exit status for each URL, in URL order
      (0 means success). The original implementation discarded these;
      returning them is backward-compatible and lets callers detect
      failed mirrors.
    """
    config = self._session.getConfig()
    source = self._session.getSource()
    # All options except the URL itself are identical for every URL,
    # so build them once instead of on every loop iteration.
    base_args = self._build_args(config, source)
    exit_codes = []
    for url in source.getUrls():
      exit_codes.append(subprocess.call(base_args + [url]))
    return exit_codes

  def _build_args(self, config, source):
    """Build the wget argument list shared by every URL.

    The URL itself is appended by the caller (fetch).
    """
    args = [config.getWgetPath()]
    args.append('-r')				# recursive
    args.append('--protocol-directories')	# use dirs like data/ktl/http/www.ktl.fi/portal/index.html
    args.append("-P%s/%s" % (config.getDataDir(), source.getName())) # store docs under $datadir/$source
    args.append("-U%s" % config.getUserAgent())	# use the configured User-Agent string
    args.append("-linf")			# infinite recursion depth
    domains = source.getDomains()
    if domains:
      args.append("-D%s" % domains)		# limit to configured domains
    excludeext = source.getExcludeExt()
    if excludeext:
      args.append("-R%s" % excludeext)		# reject these file suffixes
    wait = source.getWait()
    if wait != 0:
      args.append("-w%d" % wait)		# seconds to wait between retrievals
    return args
