# -*- coding: utf-8  -*-
#    Large Fair Use Image Bot - A Python bot that uses Pywikipediabot framework and works on MediaWiki wikis at general.
#    Copyright (C) 2008 OsamaK
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU General Public License as published by
#    the Free Software Foundation, either version 3 of the License, or
#    (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU General Public License for more details.
#
#    You should have received a copy of the GNU General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
#    Please report bugs or help improving this program by connecting to <OsamaK.WFM@gmail.com>
import wikipedia
import time
import shelve
from okbot import *
catname = raw_input("Enter the category name:")
imagename = raw_input("Enter the image name:")
if not catname: catname = 'Non-free logos'
if imagename:
  imagename = imagename + '|'
else:
  imagename = '!|'
site = wikipedia.getSite(code='en',fam='wikipedia')
lasttime = False

def main():
  timenum = 0
  while True:
    if timenum == 0:
      cmcontinue = largefu()
    else:
      cmcontinue = largefu(cmcontinue)
    timenum = 1
    if lasttime: break

def largefu(cmcontinue=imagename):
  #reportfile = open("../public_html/largfu",'a')
  reportfile = open("largfu",'a')
  db = shelve.open("largefu.dat")
  #http://en.wikipedia.org/w/api.php?action=query&list=categorymembers&cmtitle=Category:Non-free_logos&cmnamespace=6&cmcontinue=!|&cmlimit=200&format=jsonfm
  predata = {
             'action': 'query',
             'list': 'categorymembers',
             'cmtitle': 'Category:' + catname.lstrip("Category:"),
             'cmnamespace': '6',
             'cmcontinue': cmcontinue,
             'cmprop': 'title',
             'cmlimit': '200',
             'format': 'json',
              }
  data = getAPI(site, predata)
  if data == None: return cmcontinue # Was there any error?
  if 'query-continue' in data:
    cmcontinue = data['query-continue']['categorymembers']['cmcontinue']
  else:
    cmcontinue = cmcontinue
    lasttime = True

  for page in data['query']['categorymembers']:
    title = page['title']
    if title.lower().endswith('svg'):
      wikipedia.output("%s is a SVG. Skipping." % title)
      continue
    predata = {#http://en.wikipedia.org/w/api.php?action=query&titles=Image:Where%20in%20world%20is%20osama.jpg&prop=imageinfo&iiprop=size&format=jsonfm
               'action': 'query',
               'prop': 'imageinfo',
               'titles': title,
               'iiprop':'size',
               'format': 'json',
              }
    sizedata = getAPI(site, predata)
    if sizedata == None: return cmcontinue
    for imageid in sizedata['query']['pages']:
      if not 'imageinfo' in sizedata['query']['pages'][imageid]:
        wikipedia.output("%s has problem. Skipped." % title)
        continue
      width = sizedata['query']['pages'][imageid]['imageinfo'][0]['width']
      height = sizedata['query']['pages'][imageid]['imageinfo'][0]['height']
      skipwordtest = None # No skip words found by default
      widthtest = False

      if width > 500 or height > 500:
        widthtest = True
        pageget = wikipedia.Page(site,title).get()
        for skipword in ['fairusereduce','fair use reduce','reduce size','reduce','fair-use reduce','image-toobig','comic-ovrsize-img','non-free-reduce','nfr','non-free reduce']:
          if pageget.lower().find(skipword) != -1:
            skipwordtest = skipword
            break
      else:
        wikipedia.output(u"%s's size is %s × %s only. Skipped." % (title,width,height))
        continue

      if widthtest and (not skipwordtest) and (not str(imageid) in db):
        wikipedia.output(u"%s's size is %s × %s! It is large." % (title,width,height))
        imagePage = wikipedia.Page(site,title)
        put_page(site, imagePage , u"{{Non-free reduce}}\n\n" + imagePage.get() , u"Non-free %s × %s image, {{[[Template:Non-free reduce|Non-free reduce]]}}" % (width,height))
        reportfile.write(u"%s %s × %s\n".encode('utf-8') % (title.encode('utf-8'), width, height))
        db[str(imageid)] = True
      elif str(imageid) in db:
        wikipedia.output(u"%s is in the log. Skipped" % title)
      elif skipwordtest:
        wikipedia.output(u"%s includes \"%s\". Skipped." % (title, skipwordtest))

    time.sleep(0.5)
  reportfile.close()
  db.close()
  return cmcontinue

if __name__ == '__main__':
    try:
        main()
    finally:
        wikipedia.stopme()
