require 'net/http'

begin
  require 'xml'
rescue LoadError
  raise "Couldn't require 'xml.'  Try installing the libxml-ruby gem (which requires the libxml2 development package to be installed on your local system)."
end

begin
  require 'zip/zip'
rescue LoadError
  raise "Couldn't require 'zip/zip.'  Try installing the rubyzip gem."
end

# a concrete Backup implementation for backing up Basecamp data
# NOTE: This is not complete, but it does something... SSL access is NOT working. Basecamp API is spotty and incomplete.
class BasecampBackup < Backup
  attr_meta :username, :password, :url, :use_ssl

  # Fetch +uri_str+ through the shared Basecamp connection, following up to
  # +limit+ HTTP redirects.
  #
  # Returns the Net::HTTPSuccess response.
  # Raises ArgumentError when the redirect chain exceeds +limit+, and lets
  # Net::HTTPResponse#error! raise the appropriate Net::HTTP error for any
  # other non-success response.
  def fetch(uri_str, limit = 10)
    raise ArgumentError, 'HTTP redirect too deep' if limit <= 0

    # NOTE(review): an empty-body POST is unusual for a read — presumably the
    # Basecamp API of this era required it; confirm before changing to GET.
    response = Basecamp.connection.post(uri_str, '', "Content-Type" => "application/xml")

    # Check for redirects and do it again if we find one.
    case response
      when Net::HTTPSuccess     then response
      when Net::HTTPRedirection then fetch(response['location'], limit - 1)
      else
        # error! raises by itself; wrapping it in Exception.new was dead code.
        response.error!
    end
  end

  # Run the backup: walk projects (posts, comments, categories, milestones)
  # and companies via the Basecamp API, writing each XML response under
  # RAILS_ROOT/tmp/basecamp, then zip the resulting tree.
  def run
    Basecamp::establish_connection!(self.url, self.username, self.password, self.use_ssl)

    base_dir = File.join(RAILS_ROOT, 'tmp', 'basecamp')
    FileUtils.rm_rf(base_dir)
    FileUtils.mkdir_p(base_dir)

    # Write out all projects, one of the base required items
    File.open(File.join(base_dir, "projects.xml"), "w+") do |f|
      f << fetch("/projects.xml").body
    end

    doc = parse_xml(File.read(File.join(base_dir, "projects.xml")))

    projects_dir = File.join(base_dir, 'projects')
    FileUtils.mkdir_p(projects_dir)

    # Use the project as the starting point for retrieving data
    doc.find("//projects/project/id").each do |id|
      proj_dir = File.join(projects_dir, id.content)
      FileUtils.mkdir_p(proj_dir)

      archive_doc = parse_xml(fetch("/projects/#{id.content}/posts/archive.xml").body)

      File.open(File.join(proj_dir, "posts.xml"), "w+") do |f|
        f << %Q{<posts type="array">}
        # BUGFIX: search the archive document (the old code searched the
        # projects document), and use a distinct block variable so the
        # enclosing project +id+ is not clobbered.
        archive_doc.find("//posts/post/id").each do |post_id|
          f << fetch("/posts/#{post_id.content}.xml").body
        end
        f << "</posts>"
      end

      posts_doc = parse_xml(File.read(File.join(proj_dir, "posts.xml")))

      posts_dir = File.join(proj_dir, 'posts')
      FileUtils.mkdir_p(posts_dir)

      posts_doc.find("//posts/post/id").each do |post_id|
        # BUGFIX: key each post's directory by the post id, not the project
        # id — the old code wrote every post's comments into one directory.
        post_dir = File.join(posts_dir, post_id.content)
        FileUtils.mkdir_p(post_dir)

        File.open(File.join(post_dir, "comments.xml"), "w+") do |f|
          f << fetch("/posts/#{post_id.content}/comments.xml").body
        end
      end

      File.open(File.join(proj_dir, "categories.xml"), "w+") do |f|
        f << fetch("/projects/#{id.content}/categories.xml").body
      end

      # 403 FORBIDDEN
      # File.open(File.join(proj_dir, "todo_lists.xml"), "w+") do |f|
      #   f << fetch("http://themasterdebater.projectpath.com/projects/#{id.content}/todo_lists.xml").body
      # end
      # 
      File.open(File.join(proj_dir, "milestones.xml"), "w+") do |f|
        f << fetch("/projects/#{id.content}/milestones/list.xml").body
      end

      # 403 FORBIDDEN
      # File.open(File.join(proj_dir, "time_entries.xml"), "w+") do |f|
      #   f << fetch("http://themasterdebater.projectpath.com/projects/#{id.content}/time_entries.xml").body
      # end
    end

    # Companies are required before people
    File.open(File.join(base_dir, "companies.xml"), "w+") do |f|
      f << fetch("/companies.xml").body
    end

    doc = parse_xml(File.read(File.join(base_dir, "companies.xml")))

    companies_dir = File.join(base_dir, 'companies')
    FileUtils.mkdir_p(companies_dir)

    # Use the companies as the starting point for retrieving people
    doc.find("//companies/company/id").each do |id|
      company_dir = File.join(companies_dir, id.content)
      FileUtils.mkdir_p(company_dir)

      # Posts are standalone
      # API DOES NOT WORK
      # File.open(File.join(company_dir, "people.xml"), "w+") do |f|
      #   f << fetch("http://themasterdebater.projectpath.com/contacts/people/#{id.content}.xml").body
      # end
    end

    # NOTE(review): shells out to the system `zip` despite requiring rubyzip
    # above — kept as-is to preserve behavior; consider Zip::ZipFile instead.
    `cd #{base_dir}/.. && zip -r basecamp basecamp`
  end

  private

  # Parse an XML string into a libxml document (extracted from the repeated
  # three-line parser boilerplate in run).
  def parse_xml(xml)
    parser = XML::Parser.new
    parser.string = xml
    parser.parse
  end
end
