#!/usr/bin/env python

# crawler client for ppmsg
# copyright gluo 2013
#   gingerluo@163.com
# this can be a sample client code

############## sample ###################
#   stage 1:
#   range(100).html

#   stage 2:
#   212345.html

#   stage 3:
#   212345.html, 212345_2.html, 212345_3.html, 212345_4.html

#   stage 4:
#   output resource links

#   final:
#   Ants those resources

############## end sample ###############

import optparse, sys, os


# --- CLI parsing and working-directory setup -------------------------------
# Parses --stage/-s (which pipeline stage to run, as a string, default '1')
# and --workdir/-w (directory where each stage's output file is written),
# validates both, then chdirs into the workdir so later stages find the
# previous stage's output files by bare name.

p = optparse.OptionParser()
p.add_option('--stage', '-s', default='1')
p.add_option('--workdir', '-w', default='')
options, arguments = p.parse_args()

STAGES = 5  # total number of pipeline stages (see the sample header above)
stage = int(options.stage)

# Reject stages outside the advertised 1..STAGES range.
# (Was `stage < 2`, which rejected stage 1 even though the error message
# below says the valid range starts at 1.)
if stage < 1 or stage > STAGES:
  print('enter valid stage ( 1 - %d )!' % STAGES)
  sys.exit(1)

if options.workdir == '':
  print('Enter working dir with -w !')
  sys.exit(1)
else:
  try:
    os.mkdir(options.workdir)
  except OSError:
    # Directory most likely already exists from an earlier stage run;
    # a bare `except` here would also have hidden real errors.
    pass
  finally:
    os.chdir(options.workdir)

from crawler_lib import *

MAIN_URL = "http://www.ppmsg.com/siwameitui/"

# Retrieving the index-page link list, using Larva.
# FIXME: each stage should be stored in a different .py file

# Stage 1: build the listing-page URL list.
# Page 1 of the listing is MAIN_URL itself; pages 2..4 are MAIN_URL + "<n>.html".
# Built as a single list expression instead of the original py2-only
# map(...) + .insert(0, ...) (on py3, map() returns an iterator and the
# insert would crash).
out_stage_1 = [MAIN_URL] + [MAIN_URL + str(n) + ".html" for n in range(2, 5)]
print(out_stage_1)

# Example of a stage-2 link embedded in an index page:
#<li><a target="_blank" href="201306/20699.html"><img src="http://img.ppmsg.net/Photo/201306/20699.jpg" alt=""><br /></a></li>


def _read_stage_lines(path):
  """Return the list of lines from a previous stage's output file.

  The original `file(path, 'r').readlines()` never closed the handle and
  `file` is a Python-2-only builtin; a `with open(...)` block fixes both.
  """
  with open(path, 'r') as f:
    return f.readlines()


if stage == 2:
  # Stage 2: crawl the listing pages and collect per-gallery page links
  # (relative URLs shaped like "201306/20699.html").
  insect_stage_2 = Spider(out_stage_1, "stage_2")
  insect_stage_2.threading_spider(
      res_type=insect_stage_2.RES_LINK,
      ptn=r"\d+/\d+\.html",
      match_mode=True
  )
  wait(insect_stage_2)

elif stage == 3:
  # Stage 3: crawl each gallery page and collect its pagination links
  # ("212345.html", "212345_2.html", "212345_3.html", ...).
  insect_stage_3 = Spider(_read_stage_lines('stage_2'), "stage_3")
  insect_stage_3.threading_spider(
      res_type=insect_stage_3.RES_LINK,
      ptn=r"\d+_?\d?\.html",
      match_mode=True
  )
  wait(insect_stage_3)

elif stage == 4:
  # Stage 4: crawl every gallery sub-page and collect image resource links.
  insect_stage_4 = Spider(_read_stage_lines('stage_3'), "stage_4")
  insect_stage_4.threading_spider(
      res_type=insect_stage_4.RES_JPG,
      # FIXME: this host filter misses some jpgs
      ptn=r"ppmsg\.net",
      match_mode=True
  )
  wait(insect_stage_4)

elif stage == 5:
  # Final stage: download every resource link collected by stage 4.
  final_insect = Ant(_read_stage_lines('stage_4'))
  wait(final_insect)

print("Done Stage " + str(stage))
print('')
