import time
import urllib2
import threading
import optparse

"""
Thread that given a URL, will make a number of requests, timing each.
"""
class RequestWorker(threading.Thread):
  
  """
  Initialise the class.
  """
  def __init__(self, url, num_requests):
    threading.Thread.__init__(self)
    self.url = url
    self.num_requests = num_requests
    self.errors = 0
    self.request_times = []
	
  """
  Make the requests and gather data.
  """
  def run(self):
    for r in range(self.num_requests):
      try:
        self.request_times.append(self.request())
      except urllib2.URLError:
        self.errors = self.errors + 1
	
  """
  Gets the given url and returns how long it took.
  """
  def request(self):
	  
    # Store the start time
    start = time.time()
    
    # Open the file and read all the data
    file = urllib2.urlopen(self.url)
    content = file.readlines()
  
    # Work out the elapsed time
    end = time.time()
    elapsed = end - start
    return elapsed


def average(vals):
  """Return the mean of vals, rounded to 5 decimal places.

  Returns 0.0 for an empty sequence -- this happens when every request
  for a URL failed, and previously raised a TypeError from reduce().
  """
  if not vals:
    return 0.0
  # float() forces true division even if ints are ever passed (Python 2).
  return round(sum(vals) / float(len(vals)), 5)

if __name__ == "__main__":

  # Set up the command line option parser
  usage = "benchmark.py --num-requests <count>"
  parser = optparse.OptionParser(usage)
  
  parser.add_option("-n", "--num-requests", dest = "num_requests", type = "int",
  		help = "Number of requests per URL.", default = 10, action = "store")
  
  parser.add_option("-f", "--url-file", dest = "url_file", type = "string",
		help = "File containing a list of URLs to test.", default = "", action = "store")
	
  # Psarse the command line options
  options, args = parser.parse_args()
  
  if options.url_file == "":
    print "No URL file supplied."
    exit()

  # Open the URL file
  f = open(options.url_file)
  urls = f.readlines()

  # Create worker threads
  threads = []
  for url in urls:
    t = RequestWorker(url, options.num_requests)
    threads.append(t)

  # Start all the workers
  for t in threads:
    t.start()

  # Wait for all the threads to finish
  for t in threads:
    t.join()

  # Print results
  averages = []
  for t in threads:
    avg = average(t.request_times)
    print t.url + " averaged " + str(avg) + "s per request, for " + str(t.num_requests) + " requests, with " + str(t.errors) + " errors."
    averages.append(avg)

  print "Overall average: " + str(average(averages)) + "s per request."
    
