#!/usr/bin/env python
#
# Copyright (C) 2007 Jason Kivlighn
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, #USA.

import sqlite3
import urllib2, urllib
import xml.dom.minidom
from xml import xpath
import time
import sys

VERBOSE=1
def debug(*args):
  """Write each argument to stdout followed by a space, then a newline.

  Emits nothing at all when VERBOSE is not positive.
  """
  if VERBOSE <= 0:
    return
  pieces = [str(item) + " " for item in args]
  sys.stdout.write("".join(pieces) + "\n")

# Monkey-patch urllib2.urlopen so every request sleeps 5 seconds first --
# a crude rate limit to be polite to the Audioscrobbler web service.
keep_urlopen = urllib2.urlopen
def override_urlopen(url):
  # Throttle, then delegate to the real urlopen saved above.
  time.sleep(5)
  return keep_urlopen(url)
urllib2.urlopen = override_urlopen

# URL templates for the legacy Audioscrobbler (last.fm) 1.0 web services.
# Each %s takes a URL-quoted username (or artist/track name); the chart
# URLs additionally take from/to unix timestamps as %d.
CHART_LIST = "http://ws.audioscrobbler.com/1.0/user/%s/weeklychartlist.xml"
ARTIST_CHART = "http://ws.audioscrobbler.com/1.0/user/%s/weeklyartistchart.xml?from=%d&to=%d"
TRACK_CHART = "http://ws.audioscrobbler.com/1.0/user/%s/weeklytrackchart.xml?from=%d&to=%d"
ALBUM_CHART = "http://ws.audioscrobbler.com/1.0/user/%s/weeklyalbumchart.xml?from=%d&to=%d"
NEIGHBORS = "http://ws.audioscrobbler.com/1.0/user/%s/neighbours.xml"
FRIENDS = "http://ws.audioscrobbler.com/1.0/user/%s/friends.xml"
PROFILE = "http://ws.audioscrobbler.com/1.0/user/%s/profile.xml"
TOP_ARTIST_TAGS = "http://ws.audioscrobbler.com/1.0/artist/%s/toptags.xml"
TOP_TRACK_TAGS = "http://ws.audioscrobbler.com/1.0/track/%s/%s/toptags.xml"

def getText(nodelist):
  """Concatenate the character data of all text nodes in `nodelist`.

  Non-text nodes (elements, comments, ...) are skipped; an empty list
  yields the empty string.
  """
  return "".join(node.data for node in nodelist
                 if node.nodeType == node.TEXT_NODE)

def tryGetElementText(xml,tagName):
  """Return the text content of the first <tagName> element under `xml`.

  Returns None when no such element is present.
  """
  matches = xml.getElementsByTagName(tagName)
  if not matches or not matches[0]:
    return None
  return getText(matches[0].childNodes)

def fetch_user_profile(user):
  """Fetch a last.fm user's public profile via the 1.0 API.

  Returns a tuple (url, realname, registered, age, gender, country,
  playcount, avatar, icon) of strings -- age/gender/country may be None
  when absent from the profile -- or None when the download fails.
  """
  # Quote exactly once.  The original code quoted `user` here AND again
  # at urlopen time, double-encoding names with special characters.
  user = urllib.quote(user)

  try:
    debug("Downloading %s" % PROFILE % user)
    profile_conn = urllib2.urlopen(PROFILE % user)
  except urllib2.URLError:
    sys.stderr.write("bad url")
    return None

  profile_xml = xml.dom.minidom.parse(profile_conn)
  # These elements are always present in a profile document.
  url = getText(profile_xml.getElementsByTagName("url")[0].childNodes)
  realname = getText(profile_xml.getElementsByTagName("realname")[0].childNodes)
  # Registration time is carried as a unix timestamp attribute.
  registered = profile_xml.getElementsByTagName("registered")[0].getAttribute("unixtime")
  # Optional fields: users may withhold these.
  age = tryGetElementText(profile_xml,"age")
  gender = tryGetElementText(profile_xml,"gender")
  country = tryGetElementText(profile_xml,"country")
  playcount = getText(profile_xml.getElementsByTagName("playcount")[0].childNodes)
  avatar = getText(profile_xml.getElementsByTagName("avatar")[0].childNodes)
  icon = getText(profile_xml.getElementsByTagName("icon")[0].childNodes)

  return (url,realname,registered,age,gender,country,playcount,avatar,icon)

def fetch_weekly_user_data(user,data_range=(None,None),types=["artists"]):
  """Download weekly chart data for `user` from the Audioscrobbler 1.0 API.

  data_range is a (from_ts, to_ts) pair of unix timestamps (either may be
  None): weeks beginning at or before from_ts are skipped, and iteration
  stops at the first week beginning after to_ts.  `types` selects any of
  "artists", "tracks" and "albums".

  Returns (artist_data, track_data, album_data); unrequested lists stay
  empty.  Artist/album rows are (begin_ts, end_ts, name, playcount) and
  track rows are (begin_ts, end_ts, (artist, name), playcount), where
  playcount is the string from the XML.

  NOTE(review): `types` is a mutable default argument; it is only read
  here, but callers should still pass their own list.
  """
  artist_data = []
  track_data = []
  album_data = []

  # First fetch the list of available weekly chart periods for this user.
  debug("Downloading %s" % CHART_LIST % user)
  chart_list_conn = urllib2.urlopen(CHART_LIST % urllib.quote(user))

  chart_list_xml = xml.dom.minidom.parse(chart_list_conn)
  for chart_el in chart_list_xml.getElementsByTagName("chart"):
    begin_ts = int(chart_el.getAttribute("from"))
    end_ts = int(chart_el.getAttribute("to"))

    # Skip weeks already covered.  NOTE(review): a from_ts of 0 is falsy
    # and disables this check -- presumably never a real chart timestamp.
    if data_range[0] and begin_ts <= data_range[0]:
      continue

    # Charts are listed chronologically, so stop past the upper bound.
    if data_range[1] and begin_ts > data_range[1]:
      break

    if "artists" in types:
      debug("Adding artist data for the week of %s" % time.strftime("%b %d, %Y",time.gmtime(begin_ts)))
      debug("Downloading %s" % ARTIST_CHART % (user,begin_ts,end_ts))
      chart_conn = urllib2.urlopen(ARTIST_CHART % (urllib.quote(user),begin_ts,end_ts))
      chart_xml = xml.dom.minidom.parse(chart_conn)
      for artist_el in chart_xml.getElementsByTagName("artist"):
        name = getText(artist_el.getElementsByTagName("name")[0].childNodes)
        playcount = getText(artist_el.getElementsByTagName("playcount")[0].childNodes)

        debug("  adding artist: "+ name.encode("utf8"))
        artist_data.append((begin_ts,end_ts,name,playcount))

    if "tracks" in types:
      debug("Adding track data for the week of %s" % time.strftime("%b %d, %Y",time.gmtime(begin_ts)))
      debug("Downloading %s" % TRACK_CHART % (user,begin_ts,end_ts))
      chart_conn = urllib2.urlopen(TRACK_CHART % (urllib.quote(user),begin_ts,end_ts))
      chart_xml = xml.dom.minidom.parse(chart_conn)
      for artist_el in chart_xml.getElementsByTagName("track"):
        name = getText(artist_el.getElementsByTagName("name")[0].childNodes)
        playcount = getText(artist_el.getElementsByTagName("playcount")[0].childNodes)
        # Tracks also carry the performing artist's name.
        artist = getText(artist_el.getElementsByTagName("artist")[0].childNodes)

        debug("  adding track:", name.encode("utf8"))
        track_data.append((begin_ts,end_ts,(artist,name),playcount))

    if "albums" in types:
      debug("Adding album data for the week of %s" % time.strftime("%b %d, %Y",time.gmtime(begin_ts)))
      debug("Downloading %s" % ALBUM_CHART % (user,begin_ts,end_ts))
      chart_conn = urllib2.urlopen(ALBUM_CHART % (urllib.quote(user),begin_ts,end_ts))
      chart_xml = xml.dom.minidom.parse(chart_conn)
      for artist_el in chart_xml.getElementsByTagName("album"):
        name = getText(artist_el.getElementsByTagName("name")[0].childNodes)
        playcount = getText(artist_el.getElementsByTagName("playcount")[0].childNodes)

        debug("  adding album:", name.encode("utf8"))
        album_data.append((begin_ts,end_ts,name,playcount))

  return (artist_data, track_data, album_data)

def fetch_neighbors(user):
  """Return the usernames last.fm lists as neighbours of `user`.

  Returns an empty list when the download fails.
  """
  try:
    debug("Downloading %s" % NEIGHBORS % user)
    conn = urllib2.urlopen(NEIGHBORS % urllib.quote(user))
  except urllib2.URLError:
    sys.stderr.write("bad url")
    return []

  dom = xml.dom.minidom.parse(conn)
  return [user_el.getAttribute("username")
          for user_el in dom.getElementsByTagName("user")]
  
def fetch_friends(user):
  """Return the usernames last.fm lists as friends of `user`.

  Returns an empty list when the download fails.
  """
  try:
    debug("Downloading %s" % FRIENDS % user)
    conn = urllib2.urlopen(FRIENDS % urllib.quote(user))
  except urllib2.URLError:
    sys.stderr.write("bad url")
    return []

  dom = xml.dom.minidom.parse(conn)
  return [user_el.getAttribute("username")
          for user_el in dom.getElementsByTagName("user")]

def fetch_artist_tags(artist_info):
  print "Fetching artist tags..."

  artist_tag_info = {}

  i = -1
  for begin_ts,end_ts,artist,playcount in artist_info:
    i += 1

    if artist_tag_info.has_key(artist):
      continue

    try:
      debug("(%d%%) Downloading %s" % (100.0*i/len(artist_info),TOP_ARTIST_TAGS % (artist.encode("utf8"))) )
      artist_tags_conn = urllib2.urlopen(TOP_ARTIST_TAGS % (urllib.quote(urllib.quote(artist.encode("utf8")).replace("/","%2F"))))
    except urllib2.URLError:
      sys.stderr.write("bad url")
      artist_tag_info[artist] = []
      continue
    except:
      sys.stderr.write("oops")
      artist_tag_info[artist] = []
      continue

    artist_tags_xml = xml.dom.minidom.parse(artist_tags_conn)
    artist_tag_info[artist] = [getText(name_el.childNodes) for name_el in xpath.Evaluate("//tag[count > 15]/name",artist_tags_xml)]

  return artist_tag_info

def fetch_track_tags(track_info):
  print "Fetching track tags..."

  track_tag_info = {}

  i = -1
  for begin_ts,end_ts,(artist,name),playcount in track_info:
    i += 1

    if track_tag_info.has_key(name):
      continue

    try:
      debug("(%d%%) Downloading %s" % (100.0*i/len(track_info),TOP_TRACK_TAGS % (artist.encode("utf8"),name.encode("utf8"))) )
      track_tags_conn = urllib2.urlopen(TOP_TRACK_TAGS % (urllib.quote(urllib.quote(artist.encode("utf8"))),urllib.quote(urllib.quote(name.encode("utf8")))))
    except urllib2.URLError:
      sys.stderr.write("bad url")
      track_tag_info[name] = []
      continue
    except:
      sys.stderr.write("oops")
      track_tag_info[name] = []
      continue

    track_tags_xml = xml.dom.minidom.parse(track_tags_conn)
    track_tag_info[name] = [getText(name_el.childNodes) for name_el in xpath.Evaluate("//tag[count > 15]/name",track_tags_xml)]

  return track_tag_info

def normalize_tags(tag_dict):
  """Normalization hook for tag dictionaries; currently the identity."""
  return tag_dict

def write_weekly_user_data(db_conn,user,data_range=(None,None),types=["artists","tracks","albums","track_tags","artist_tags"]):
  """Fetch `user`'s weekly chart data and insert it into the database.

  data_range is passed through to fetch_weekly_user_data().  `types`
  selects which tables to populate; requesting "*_tags" implies fetching
  the corresponding chart data too.  Rows are inserted but NOT committed;
  the caller is responsible for db_conn.commit().
  """
  # Work on a copy: the original appended to the caller's list (and, with
  # no argument, to the shared mutable default).
  types = list(types)
  if "track_tags" in types and "tracks" not in types:
    types.append("tracks")
  if "artist_tags" in types and "artists" not in types:
    types.append("artists")

  db_c = db_conn.cursor()

  artist_data, track_data, album_data = fetch_weekly_user_data(user,data_range,types)

  if "track_tags" in types:
    track_tag_info = normalize_tags(fetch_track_tags(track_data))
    # Sum playcounts per (week, tag) across all tracks carrying that tag.
    track_tag_counts = {}
    for begin_ts,end_ts,(artist,track),playcount in track_data:
      for tag in track_tag_info[track]:
        key = ((begin_ts,end_ts),tag)
        track_tag_counts[key] = track_tag_counts.get(key,0) + int(playcount)
    for ((begin_ts,end_ts), tag), playcount in track_tag_counts.items():
      db_c.execute("INSERT INTO track_tags VALUES (?,?,?,?)", (begin_ts,end_ts,tag,playcount))

  if "artist_tags" in types:
    artist_tag_info = normalize_tags(fetch_artist_tags(artist_data))
    # Sum playcounts per (week, tag) across all artists carrying that tag.
    artist_tag_counts = {}
    for begin_ts,end_ts,artist,playcount in artist_data:
      for tag in artist_tag_info[artist]:
        key = ((begin_ts,end_ts),tag)
        artist_tag_counts[key] = artist_tag_counts.get(key,0) + int(playcount)
    for ((begin_ts,end_ts), tag), playcount in artist_tag_counts.items():
      db_c.execute("INSERT INTO artist_tags VALUES (?,?,?,?)", (begin_ts,end_ts,tag,playcount))

  for begin_ts,end_ts,name,playcount in artist_data:
    db_c.execute("INSERT INTO artists VALUES (?,?,?,?)", (begin_ts,end_ts,name,playcount))

  # NOTE(review): the track's artist is dropped here; the tracks table
  # stores the track name only.
  for begin_ts,end_ts,(artist,name),playcount in track_data:
    db_c.execute("INSERT INTO tracks VALUES (?,?,?,?)", (begin_ts,end_ts,name,playcount))

  for begin_ts,end_ts,name,playcount in album_data:
    db_c.execute("INSERT INTO albums VALUES (?,?,?,?)", (begin_ts,end_ts,name,playcount))

def initdb(db_conn):
  """Create the five chart tables on a fresh database connection.

  Every table shares the same shape: a begin/end timestamp pair, one
  TEXT value column, and an integer playcount.
  """
  cursor = db_conn.cursor()
  schema = (
    ("artists", "artist"),
    ("tracks", "track"),
    ("albums", "album"),
    ("track_tags", "track_tag"),
    ("artist_tags", "artist_tag"),
  )
  for table, value_column in schema:
    cursor.execute("CREATE TABLE %s (begin INT, end INT, %s TEXT, playcount INT)"
                   % (table, value_column))

import sys, os

if __name__ == "__main__":
  if len(sys.argv) < 2:
    print "Usage: ./fetch_lastfm.py <user>"
    sys.exit(1)
  user = sys.argv[1]
  
  db_name = "lastfm-%s.db" % user
  init_db = not os.path.exists(db_name)
  db_conn = sqlite3.connect(db_name)

  if init_db:
    print "Initializing database: %s" % db_name
    initdb(db_conn)
    last_week = None
  else:
    db_c = db_conn.cursor()
    db_c.execute('''SELECT MAX(begin) FROM artists''')
    last_week, = db_c.fetchone()
    if last_week:
      print "Updating from", time.strftime("%b %d, %Y",time.gmtime(last_week+7*24*60*60))
  
  try:
    write_weekly_user_data(db_conn,user,data_range=(last_week,None),types=["artists","albums","tracks","track_tags","artist_tags"])
    write_weekly_user_data(db_conn,user,data_range=(last_week,None),types=["artists"])
  except:
    os.unlink(db_name)
    print "Failed, try again later"
    raise
  else:
    db_conn.commit()
    db_conn.close()
