#! /usr/bin/env python 
import sys
import re
import urllib
import optparse
from BeautifulSoup import BeautifulSoup

from modules.utils import *

staff_base_url = 'http://www.nbjsfl.com'
# gbk-encoded caption the site renders directly under the staff image on the
# detail page (roughly: "save this picture: right-click and choose 'Save
# Target As...'").  It is the only reliable marker for the anchor that holds
# the real image link, so StaffDownloadPage matches anchors against it.
staff_image_tip = u'\xa1\xfc\xb1\xa3\xb4\xe6\xb4\xcb\xd5\xc5\xcd\xbc\xc6\xac\xa3\xa8\xc7\xeb\xb5\xe3\xbb\xf7\xca\xf3\xb1\xea\xd3\xd2\xbc\xfc\xd1\xa1\xd4\xf1\xa1\xb0\xc4\xbf\xb1\xea\xc1\xed\xb4\xe6\xce\xaa(A)\xa1\xad\xa1\xb1\xd1\xa1\xcf\xee\xa3\xa9'

def build_search_result_url(options):
    """
    build search result url for http://www.nbjsfl.com/search_music.asp

    Note: it needs POST method instead of GET
    example:
        http://www.nbjsfl.com/search_music.asp
        xxx=tm&sss=%BE%D5%BB%A8%CC%A8&Submit=%CB%D1%CB%F7
    """
    base_url = 'http://www.nbjsfl.com/search_music.asp'

    if options.name:
        word = options.name
        category = 'tm'
    elif options.artist:
        word = options.artist
        category = 'yc'
    #elif options.lyric:
        #word = options.lyric
        #category = 'gc'
    #elif options.lyric_artist:
        #word = options.lyric_artist
        #category = 'zc'
    #elif options.composer:
        #word = options.composer
        #category = 'zq'
    else:
        return None

    try:
       encoding = guess_encoding(word)
    except:
        print "I can not guess the encoding of your input"
        raise
    if encoding!='gbk':
        gb_word = convert_string_encoding(word, encoding, 'gbk')
    else:
        gb_word = word

    params_tuples = (
        ('xxx',category),
        ('sss',gb_word),
        ('Submit','\xcb\xd1\xcb\xf7'),
    )
    params = urllib.urlencode(params_tuples)
    return base_url, params

class StaffInfo:
    """One search hit: the staff's display name and its detail-page url."""

    def __init__(self):
        # Both fields are populated later, while scraping search results.
        self.name = ''
        self.url = ''

class StaffPage:
    """Collects staff links scraped from a search-result page."""

    def __init__(self):
        self.reset()

    def reset(self):
        # Drop any previously collected results.
        self.staff_info_list = []

    def open(self, url, params):
        """Fetch the search result page and collect matching staff links.

        Returns ``(True, '')`` on success, ``(False, reason)`` when the
        page contains no matching anchors.
        """
        soup = get_soup(url, params)
        anchors = soup.findAll('a', {'href': re.compile('view_music\.asp\?id=\d+')})
        if not anchors:
            return False, "Sorry, don't find matched staff."

        for anchor in anchors:
            entry = StaffInfo()
            entry.name = anchor.string
            entry.url = staff_base_url + '/' + anchor['href']
            self.staff_info_list.append(entry)
        return True, ''

    def select_staff(self):
        """Return only the collected entries that carry a non-empty url."""
        return [entry for entry in self.staff_info_list if entry.url != '']

class StaffDownloadPage:
    """Extracts the staff-image link from a staff detail page."""

    def get_download_link(self, dl_staff_url):
        """Return the gbk-encoded image url found on *dl_staff_url*,
        or ``None`` when the expected anchor is not present.
        """
        dl_page_source = urllib.urlopen(dl_staff_url)
        try:
            dl_soup = BeautifulSoup(dl_page_source.read())
        finally:
            # urlopen handles are not context managers in Python 2;
            # close explicitly so the socket is not leaked.
            dl_page_source.close()
        staff_links = dl_soup.findAll('a', {'class': '2'})
        # The real image link is the anchor whose text is the fixed gbk
        # "save this picture" tip rendered under the staff image.
        if staff_links and staff_links[0].string == staff_image_tip:
            return staff_links[0]['href'].encode('gbk')
        return None

def download_staff(url, params, options):
    staff_page = StaffPage()
    result, reason = staff_page.open(url, params)
    selected_staffs = staff_page.select_staff()
    if result and selected_staffs:
        dl_staff_url = selected_staffs[0].url
        dl_staff_name =selected_staffs[0].name

        dl_staff_page = StaffDownloadPage()
        dl_link = dl_staff_page.get_download_link(dl_staff_url) 
        dl = Downloader(output_indent=' '*4, dry_run_mode=options.dry_run)
        dl.download(dl_staff_name+dl_link[-4:], dl_link)

if __name__ == '__main__':
    parser = optparse.OptionParser(usage="%prog [OPTION]...",
                                   version="%prog1.0")
    parser.add_option('-n', '--name', dest='name',
                      help='song\'s name to search')
    parser.add_option('-a', '--artist', dest='artist',
                      help='artist\'s name to search')
    parser.add_option('-t', '--dry-run', dest='dry_run',
                      action="store_true", default=False,
                      help='do not actually download files')
    options, args = parser.parse_args()

    # build_search_result_url returns None when neither -n nor -a was
    # given; previously this crashed with a TypeError while unpacking.
    search = build_search_result_url(options)
    if search is None:
        parser.error('please specify a song name (-n) or an artist (-a)')
    url, params = search
    download_staff(url, params, options)



