#!/usr/bin/env python2.6

# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Fetches all the submissions in a series.

This iteratively fetches all the submissions from all the topics in a series.
It then dumps the information to a CSV file.
"""

__author__ = "dcoker@google.com (Doug Coker)"

import csv
import getopt
import httplib
import sys
import json


def HttpGetJson(host, path):
  """Returns the parsed JSON "data" payload from http://host/path.

  Args:
    host: hostname to connect to over plain HTTP (port 80).
    path: absolute request path, including any query string.

  Returns:
    The object stored under the top-level "data" key of the JSON response.

  Raises:
    AssertionError: if the server responds with anything other than 200.
  """
  conn = httplib.HTTPConnection(host)
  try:
    conn.request("GET", path)
    response = conn.getresponse()
    status, reason = response.status, response.reason
    assert status == 200, ("%d: %s -- did you specify a valid series_id? "
                           "req=%s") % (status, reason, path)
    body = response.read()
  finally:
    # Close even when the status assertion fires, so a failed request does
    # not leak the socket.
    conn.close()
  return json.loads(body)["data"]


def Sanitize(data):
  """Flattens each value into a single-line UTF-8 string.

  Newlines and carriage returns are squashed to spaces so each value fits
  on one CSV row.
  """
  flattened = []
  for value in data:
    one_line = ("%s" % value).replace("\n", " ").replace("\r", " ")
    flattened.append(one_line.encode("utf-8"))
  return flattened


def main(argv):
  """Parses options, makes requests, prints CSV to stdout."""
  try:
    opts, _ = getopt.getopt(argv[1:], "s:h:n:",
                            ["series_id=", "host=", "num_per_request="])
  except getopt.GetoptError, err:
    sys.stderr.write("%s\n" % err)
    sys.exit(2)

  # default values
  series_id = None
  host = "www.googleapis.com"
  num_per_request = 100

  for o, a in opts:
    if o in ("-s", "--series_id"):
      series_id = int(a, a.startswith("0x") and 16 or 10)
    elif o in ("-h", "--host"):
      host = a
    elif o in ("-n", "--num_per_request"):
      num_per_request = int(a)
    else:
      assert False, "unhandled option"

  assert series_id > 0
  assert num_per_request > 0

  handler = csv.writer(sys.stdout)
  handler.writerow((
      "seriesId",
      "topicId",
      "submissionId",
      "createdInMs",
      "authorDisplayName",
      "authorLocation",
      "text",
      "plusVotes",
      "noneVotes",
      "minusVotes",
      "videoUrl"))

  topics = HttpGetJson(host, "/moderator/v1/series/%d/topics" % series_id)
  for topic in topics["items"]:
    topic_id = int(topic["id"]["topicId"])
    num_fetched_this_topic = 0
    num_fetched_this_fetch = None
    while num_fetched_this_fetch != 0:
      submissions = HttpGetJson(host,
                                "/moderator/v1/series/%d/topics/%d/submissions"
                                "?start-index=%d"
                                "&max-results=%d"
                                "&sort=DATE_SUBMITTED_ASCENDING"
                                "" % (series_id, topic_id,
                                      num_fetched_this_topic,
                                      num_per_request))
      if not submissions or "items" not in submissions:
        break

      num_fetched_this_fetch = len(submissions["items"])
      num_fetched_this_topic += num_fetched_this_fetch

      for submission in submissions["items"]:
        handler.writerow(Sanitize(
            (submission["id"]["seriesId"],
             topic_id,
             submission["id"]["submissionId"],
             submission["created"],
             submission.get("attribution", {}).get("displayName", ""),
             submission.get("attribution", {}).get("location", ""),
             submission["text"],
             submission["counters"]["plusVotes"],
             submission["counters"]["noneVotes"],
             submission["counters"]["minusVotes"],
             submission.get("videoUrl", ""))))


# Script entry point: pass the raw argv through so main() can getopt it.
if __name__ == "__main__":
  main(sys.argv)
