require 'uri'
require 'net/http'
require 'timeout'

require 'esi/logger'
require 'esi/cache'
require 'esi/config'
require 'esi/router'
require 'esi/parser'
require 'esi/tag/base'
require 'esi/tag/include'
require 'esi/tag/invalidate'
require 'esi/tag/attempt'
require 'esi/tag/except'
require 'esi/tag/try'

module ESI

  class Handler
    attr_reader :config
    include ESI::Log

    # Wire the handler up to the dispatcher's shared collaborators.
    #
    # dispatcher - any object exposing #config and #router
    def initialize(dispatcher)
      @config, @router = dispatcher.config, dispatcher.router
    end

    # Proxy the incoming request to the origin server mapped by the router
    # and stream the response back to the client, running the body through
    # the ESI parser when the origin's headers ask for ESI processing
    # (see @config.enable_esi_processor?).
    #
    # request  - the Mongrel request (CGI-style params + rewindable body)
    # response - the Mongrel response; written to directly (status line,
    #            headers, chunked body) and marked done on the ESI path
    #
    # Errors on either path are rendered via error_response; timing and
    # chunk counts are always logged in the ensure block.
    def process(request, response)
 
      start = Time.now
      status = 200
      # translate the public REQUEST_URI into the upstream origin URL
      url = @router.url_for(request.params["REQUEST_URI"])

      # CGI-style HTTP_* env entries converted back into request headers
      params = http_params(request.params)

      proxy_error = nil


      log_debug "#{request.params["REQUEST_METHOD"]} => #{url}"
      chunk_count = 0
			uri = URI.parse(url)

			path_with_query = uri.query ? "#{uri.path}?#{uri.query}" :  uri.path

			# only POSTs forward a body upstream; every other verb proxies as GET
			proxy_request = (request.params["REQUEST_METHOD"] == "POST") ?
													Net::HTTP::Post.new( path_with_query, params ) :
													Net::HTTP::Get.new( path_with_query, params )

			proxy_connection = Net::HTTP.start(uri.host, uri.port)

			# open the connection up so we can start to stream the response
			proxy_connection.request(proxy_request,request.body.read) do|proxy_response|

				status = read_status( proxy_response )

				# 5xx responses keep our own headers; otherwise mirror the origin's
				copy_headers( response.header, proxy_response ) unless status >= 500

				if status >= 500 or !@config.enable_esi_processor?( proxy_response )
					# pass-through path: no ESI processing, stream the body verbatim
					response.start(status, true) do|head,out|

						if status >= 500
							# TODO: only report this if configured to expose it
							# NOTE(review): proxy_error is never assigned above, so this
							# always appends nil — presumably meant to carry an upstream
							# error message; confirm intent before exposing it.
							out << proxy_error
						end

						# proxy the 500 response
						proxy_response.read_body do|fragment|
							out << fragment
						end

					end
				else
          # NOTE: It's very important that surrogate control headers are set to parse only if the 
          # page has esi:include tags.  Because of the nature of Transfer-Encoding: chunked if we keep
          # everything in memory until we reach an esi tag.  Then we load the tag into memory and send the next
          # chunk and so on.  This means that the density of tags to markup will result in more or less of the document
          # being stored in memory.  A way we can get around this and attempt to keep a fixed size of the document in 
          # memory at all time is by setting a buffer size of say 1024.  Then no matter what we'll always chunk the document
          # by 1024 or some other size chunk.
					begin
						# Use the ESI Parser

						response.header["Transfer-Encoding"] = "chunked"
						# this is the important part, rather then send the whole document back we send in chunks
						# each fragment is it's own chunk, this does mean we require http 1.1
						# status line + headers are hand-rolled here because we bypass
						# response.start (it would set Content-Length, which chunked forbids)
						header = Mongrel::Const::STATUS_FORMAT % [status, Mongrel::HTTP_STATUS_CODES[status]]
						header.gsub!(/Connection: close\r\n/,'')
						response.header.out.rewind
						header << response.header.out.read + Mongrel::Const::LINE_END
						header.gsub!(/Status:.*?\r\n/,'')
						response.write( header )

						#print header
 
            parser = ESI::Parser.new
            chunk_size = @config['parser_chunk_size'] || 1024
            max_depth = @config['max_depth'] || 3
            buffer = "" # when buffer reaches chunk_size write to the response socket

            # handle start tags
            parser.start_tag_handler do|tag_name, attrs|
              tag = ESI::Tag::Base.create( @router,
                                           request.params,
                                           params,
                                           tag_name.gsub(/esi:/,''),
                                           attrs,
                                           @config.cache )
              # set the tag depth
              tag.depth = parser.depth if tag.respond_to?(:depth=)
              tag.max_depth = max_depth if tag.respond_to?(:max_depth=)

              # nested tags become children of the currently open tag
              if parser.esi_tag
                parser.esi_tag.add_child(tag)
              else
                parser.esi_tag = tag
              end
            end

            # handle end tags
            parser.end_tag_handler do|tag_name|
              ct = Time.now
              if parser.esi_tag.name == tag_name.gsub(/esi:/,'')
                # closing the top-level tag flushes it and clears the slot
                parser.esi_tag.close(parser.output)
                parser.esi_tag = nil
              else
                parser.esi_tag.close_child(parser.output,tag_name)
              end
              puts "\t[#{tag_name}] Time to close: #{Time.now - ct}"
            end
 
            # handle data streaming
            parser.output_handler do|chars|
              buffer << chars
              if buffer.size >= chunk_size
                #print buffer
                send_chunk( response, buffer )
                chunk_count += 1
                buffer = ""
              end
            end

            #t = Time.now
            # feed the upstream body through the parser as it arrives
            proxy_response.read_body do|data|
              begin
                #pt = Time.now
                parser.process data
                #puts "Time in process: #{Time.now - pt}"
              rescue => e
                puts e.message
                puts e.backtrace.join("\n")
                raise e
              end
            end
            #puts "Response Time: #{Time.now - t}"

            parser.finish 
            parser = nil

            # flush whatever is left below the chunk_size threshold
            if buffer.size > 0
              #print buffer
							send_chunk( response, buffer )
              chunk_count += 1
            end

					rescue => e
            status, error = error_response(e,url)
						response.write( error )
					end
					# terminating zero-length chunk ends the chunked body
					response.write( "0\r\n\r\n" )
					response.done = true
				end
			end # end request

		rescue => e
			status = error_response(e,url).first
		ensure

      log_request "\nCompleted => #{url}, #{Time.now - start} seconds with status #{status} and #{chunk_count} chunks\n"

    end

protected

    # Write one HTTP/1.1 chunked-transfer chunk to the client.
    #
    # Each chunk is framed as "<hex length>CRLF<payload>CRLF". The length
    # MUST be the octet count of the payload, so use #bytesize —
    # String#size counts characters and under-reports the frame length for
    # multi-byte (e.g. UTF-8) content, corrupting the chunked stream.
    #
    # response - the Mongrel response; only #write is used
    # buffer   - String payload for this chunk
    def send_chunk( response, buffer )
      size = buffer.bytesize
      chunk_header = ("%x" % size) + Mongrel::Const::LINE_END
      response.write( chunk_header )                       # chunk-size line
      response.write( buffer + Mongrel::Const::LINE_END )  # chunk data
    end

    # Render a 500 error page for a failed proxy request and log it.
    #
    # Fixes vs. previous version: corrects the "Errror" typo in the
    # user-visible heading, separates the exception message from the
    # backtrace with a newline, and tolerates a nil backtrace (an exception
    # that was constructed but never raised has no backtrace, and the old
    # code crashed on nil.join inside the error path itself).
    #
    # e   - the caught exception
    # url - the upstream URL that was being fetched
    #
    # Returns [status, html_body]; status is always 500.
    def error_response(e,url)
      status = 500
      backtrace = Array(e.backtrace).join("\n") # nil-safe: unraised exceptions have no backtrace
      error = "<h1>Internal Server Error</h1><h4>Failed while requesting => '#{url}'</h4>\n<pre>#{e.message}\n#{backtrace}</pre>"
      log_error backtrace
      log_error error
      [status, "<html><body>#{error}</body></html>"]
    end

    # Extract the numeric HTTP status code from a proxied response.
    #
    # Net::HTTPResponse already carries its status-line code as a string
    # (e.g. "200"), so read it directly instead of reverse-mapping the
    # response class through Net::HTTPResponse::CODE_TO_OBJ — that lookup
    # silently fell back to 500 for any class without an exact table entry,
    # and the bare `rescue` modifier hid every other failure too.
    #
    # response - a Net::HTTPResponse (anything exposing #code works)
    #
    # Returns the status as an Integer; 500 if the code is missing/garbled.
    def read_status(response)
      Integer(response.code)
    rescue StandardError
      500
    end

    # Translate CGI-style environment keys (e.g. "HTTP_USER_AGENT") into
    # their HTTP header form ("User-Agent"). Only HTTP_* entries survive;
    # everything else in the env hash is dropped.
    #
    # params - Hash of CGI environment keys to values
    #
    # Returns a new Hash of header-name => value pairs.
    def http_params(params)
      params.each_with_object({}) do |(raw_key, value), headers|
        header = raw_key.split('_').map { |part| part.capitalize }.join('-')
        next unless header[0, 5] == 'Http-'
        headers[header[5..-1]] = value
      end
    end

    # Mirror the origin response's headers onto the outgoing Mongrel header
    # object, canonicalizing names to Word-Case.
    #
    # Set-Cookie values are written one at a time (mongrel is case sensitive
    # about handling duplicates, and a cookie value may itself contain commas
    # — e.g. in an expires date — so we rely on the pre-split array from
    # Net::HTTP rather than splitting ourselves). Content-Length,
    # Surrogate-Control and the origin's Server header are never forwarded;
    # Server is always stamped with our own identity.
    #
    # head     - the outgoing header object (supports []=)
    # response - the upstream response (supports #to_hash => name => [values])
    def copy_headers(head,response)
      response.to_hash.each do |raw_name, values|
        name = raw_name.split(/-/).map { |segment| segment.capitalize }.join('-')
        case name
        when "Set-Cookie"
          values.each { |cookie| head["Set-Cookie"] = cookie.strip }
        when "Content-Length", "Surrogate-Control", "Server"
          # intentionally dropped — we re-frame the body and identify ourselves
        else
          head[name] = values
        end
      end
      head["Server"] = "MongrelESI 0.2"
    end

  end # Handler

end # ESI
