# Source implementation that crawls posts and comments from a Facebook group.
# TODO: prevent duplicate reports when new comments are added to already-crawled posts
# TODO: fix OAuth app-token authentication (see the commented-out FGraph.oauth_access_token call in #fetch)
class FacebookGroupSource < Source
  # Cap on the number of posts + comments enqueued in a single fetch run.
  MAX_PER_FETCH = 25

  # Pulls recent posts (and their comments) from the group's Graph API feed
  # and enqueues each item for later parsing by #parse_one.
  #
  # Stops when MAX_PER_FETCH items have been enqueued, when posts become
  # older than the last crawl timestamp (pulled_at), when the API has no
  # further pages, or when the Graph API raises an error (recorded via
  # add_error).
  #
  # Returns the number of items enqueued during this run.
  def fetch
    super

    fetched = 0
    done = false
    # this is the proper way, but not working
    #token = FGraph.oauth_access_token('283815398303966', '6a40dd6a76e588d8850f2af385cd26bf', :type => 'client_cred')
    token = configatron.facebook_access_token
    params = {:access_token => token}

    until done do
      begin
        results = FGraph.object_feed(group_id, params)

        # FIX: an empty page previously left `done` false forever (the each
        # below would be a no-op and we'd re-request the same page).
        if results.nil? || results.empty?
          done = true
          break
        end

        # Consume posts until one is older than our last crawl date,
        # or we hit the per-run limit.
        results.each do |r|
          # Normalize a sortable timestamp; fall back to created_time
          # when the post was never updated.
          r['updated_at'] = Time.parse(r['updated_time'] || r['created_time'])

          if fetched >= MAX_PER_FETCH || (!pulled_at.nil? && r['updated_at'] <= pulled_at)
            done = true
            break
          end

          # enqueue and increment counter
          enqueue_post(r)
          fetched += 1

          # Enqueue new comments. FIX: guard against posts that have no
          # 'comments' hash at all (previously raised NoMethodError on nil).
          comments = r['comments']
          if comments && comments['count'].to_i > 0 && comments['data']
            comments['data'].each do |c|
              # FIX: only count comments we actually enqueue, so comments
              # we've already seen don't eat into the MAX_PER_FETCH budget.
              if pulled_at.nil? || Time.parse(c['created_time']) > pulled_at
                enqueue_post(c)
                fetched += 1
              end
            end
          end
        end

        # Prepare to fetch the next page of posts.
        # FIX: stop cleanly when pagination is exhausted instead of looping.
        params = results.next_options
        done = true if params.nil?

      rescue FGraph::FacebookError, FGraph::OAuthError => e
        # Record the API failure as a source event and stop crawling.
        add_error(e.to_s)
        done = true
      end
    end

    # save crawl date for next run
    self.pulled_at = Time.now
    save

    update_deadline

    return fetched
  end

  # Default crawl frequency for this source type.
  # NOTE(review): units (minutes?) are defined by Source — confirm there.
  def default_update_freq
    30
  end

  # Serializes a single post (or comment) hash from the Graph API into the
  # queue as YAML, keyed by the post's Facebook ID.
  def enqueue_post(post)
    # Facebook IDs look like "<group>_<post>" (comments append "_<comment>").
    # FIX: check the match result instead of trusting $1/$2, which would
    # silently reuse captures from an earlier, unrelated match on failure.
    md = post['id'].match(/^(\d+)_(\d+)(_\d+)?$/)
    url = md ? "http://facebook.com/groups/#{md[1]}/?id=#{md[2]}" : nil

    # collect parameters and save as YAML
    params = {:url => url, :content => post['message'],
      :title => post['name'], :authored_at => Time.parse(post['created_time']), :orig_id => post['id'],
      # FIX: comments sometimes omit 'from'; guard to avoid NoMethodError
      :author => (post['from'] ? post['from']['name'] : nil)
    }
    enqueue(params[:orig_id], YAML::dump(params))
  end

  # Dequeues one saved post and turns it into a report.
  # Returns true if an item was parsed, false when the queue is empty.
  def parse_one
    # get the YAML
    yaml = dequeue
    return false if yaml.nil?

    # Decode. This YAML is self-generated by enqueue_post (trusted), so a
    # full load (symbols, Time) is intentional here — not untrusted input.
    attribs = YAML::load(yaml)

    # create the report
    create_report(attribs)

    return true
  end

  private
    # Extracts the group ID from the URL entered by the user. Memoized.
    # Returns 0 when no URL is set and 1 when the URL doesn't parse
    # (sentinel values preserved from the original interface).
    def group_id
      # don't keep computing this over and over
      return @group_id if @group_id

      # FIX: the old `add_error(...) and return N unless ...` guards only
      # returned when add_error happened to return a truthy value; if it
      # returned nil the method fell through and called match on nil.
      unless url
        add_error("No URL")
        return 0
      end

      md = url.match(/\/([\d\w]+)\/?$/)
      unless md
        add_error("Invalid URL")
        return 1
      end

      @group_id = md[1]
    end
end