class FileGenerator

  # Writes +content+ to +path+, creating or truncating the file.
  # Uses the block form of File.open so the handle is closed even if
  # writing raises.
  def self.store_file(content, path)
    File.open(path, File::WRONLY | File::TRUNC | File::CREAT) do |file|
      file.puts(content)
    end
  end

  # Returns true when +absolute_uri+ parses as a URI that has both a
  # scheme and a host component; false for relative paths, plain
  # filenames, or unparseable input.
  #
  # NOTE: the original crashed with NoMethodError on unparseable input
  # (uri_split stayed nil); an invalid URI now simply yields false.
  def self.isAbsoluteURI?(absolute_uri)
    uri_split = URI.split(absolute_uri)
    # absence of scheme and host lets us assume that the uri is not absolute
    !!(uri_split[0] && uri_split[2])
  rescue StandardError
    false
  end


  # Loads the resource at +absolute_uri+ — via Helpers.load_from_url for
  # absolute URIs, via the local filesystem otherwise — and parses it
  # into a REXML::Document.
  #
  # Raises RuntimeError with a descriptive message when the resource
  # cannot be read or cannot be parsed as XML.
  def self.document(absolute_uri)
    begin
      content = if isAbsoluteURI?(absolute_uri)
                  Helpers.load_from_url(absolute_uri)
                else
                  # File.read closes the handle; File.open(...).read leaked it
                  File.read(absolute_uri)
                end
    rescue StandardError
      raise "Error accessing XSL stylesheet. Resource #{absolute_uri} and imports available?"
    end

    begin
      REXML::Document.new(content)
    rescue StandardError => e
      raise "Error validating XSL stylesheet #{absolute_uri}. Parser returned:\n #{e.message}"
    end
  end


  # Resolves +relative_uri+ against +last_absolute_uri+.
  # Already-absolute URIs are returned unchanged; otherwise the relative
  # part replaces everything after the last '/' of the base.
  def self.make_absolute(last_absolute_uri, relative_uri)
    # already absolute uri
    return relative_uri if isAbsoluteURI?(relative_uri)

    # handling relative filename or relative uri
    base = last_absolute_uri.to_s
    base[0..base.rindex('/')] + relative_uri
  end


  # Recursively rewrites the href attributes of include/import elements
  # (with and without the xsl: prefix) in the stylesheet at
  # +absolute_uri+ to flat, de-duplicated local filenames, stores each
  # referenced stylesheet under +store_path+, and returns +path_hash+
  # mapping original absolute URIs to the stored file paths.
  #
  # +first_filename+ overrides the stored filename of the top-level
  # stylesheet only (used by arcexprfilegenerator).
  def self.locate_and_store_includes(absolute_uri, store_path, used_names = [], first_filename = nil, path_hash = nil)
    path_hash ||= {}

    doc = document(absolute_uri)
    # document() raises on failure, but guard against a nil result anyway.
    # (Original used `throw Exception.new(...)`, which is not how Ruby
    # raises exceptions — throw/catch works on symbols.)
    raise 'No or invalid content in: ' + absolute_uri.to_s unless doc

    # get include elements
    elements = REXML::XPath.match(doc, '//include')
    elements.concat(REXML::XPath.match(doc, '//xsl:include'))

    # get import elements
    elements.concat(REXML::XPath.match(doc, '//import'))
    elements.concat(REXML::XPath.match(doc, '//xsl:import'))

    elements.each do |element|
      orig_url = element.attribute('href').to_s
      new_path = orig_url.split('/').last

      # handle duplicate filenames: prefix with cp_ until unique
      # (original used single quotes, so no interpolation happened)
      while used_names.include?(new_path)
        new_path = "cp_#{new_path}"
      end
      used_names << new_path

      # change path of include/import to point to local filesystem
      element.add_attribute('href', new_path)

      # recursively locate and store includes; first_filename is nil so the
      # include keeps its own name (original mistakenly passed path_hash
      # into the first_filename slot)
      locate_and_store_includes(make_absolute(absolute_uri, orig_url), store_path, used_names, nil, path_hash)
    end

    # TODO: find another way to do that (NOT first_filename: used for arcexprfilegenerator)
    fn = first_filename || absolute_uri.to_s.split('/').last

    # store include/import to local filesystem and record the mapping
    new_fn = File.join(store_path, fn)
    store_file(doc.to_s, new_fn)
    path_hash[absolute_uri.to_s] = new_fn
    Merb.logger.info('Stored xslt referenced by ' + absolute_uri.to_s + ' to ' + new_fn)

    path_hash
  end

end
