#!/usr/bin/python
import os
def start_project(name):
	"""Create a new crawler project skeleton.

	Makes a directory *name*, chdirs into it, and creates an empty
	``app`` package plus an empty ``config.py``.  The working directory
	is deliberately left inside the new project so that a subsequent
	start_app() call operates on it.
	"""
	os.mkdir(name)
	os.chdir(name)
	os.mkdir('app')
	# package marker so 'app' is importable
	with open(os.path.join('app', '__init__.py'), 'w'):
		pass
	# empty config.py; start_app() appends per-app settings later.
	# (the original opened this file and never closed the handle)
	with open('config.py', 'w'):
		pass

# Template for the generated <name>.py entry-point script; %(name)s is
# substituted via '%' formatting in start_app().
# NOTE: the original template mixed a literal tab with 8-space indents,
# which makes the generated file raise TabError under Python 3 —
# indentation is normalized to 4 spaces here.
strMainPy = r'''
from config import %(name)s_conf as conf
from anole.core.crawlapp import CrawlApp as App
from anole.core.fetcher import HttpFetcher as Fetcher
from anole.core.crawler import Crawler
from anole.core.processormgr import ProcessorMgr
from anole.core.dns import SimpleDns as Dns
from app.%(name)s.frontier import UrlSupplier

if __name__ == '__main__':
    usp = UrlSupplier(conf)
    app = App()
    fetcher = Fetcher()
    dns = Dns()
    parts = {'USP': usp, 'APP': app, 'dns': dns, 'FETCHER': fetcher}
    crawler = Crawler(parts, conf['COCURRENCY'])

    access = {'USP': usp, 'CRAWLER': crawler, 'CONFIG': conf}
    pm = ProcessorMgr(access, conf['PROCESSORS'])
    pm.load_reg_ps(app)

    crawler.run()
'''
# Template for the per-app config dict that start_app() appends to the
# project's config.py; %(name)s is filled in with the app name.
# NOTE(review): 'COCURRENCY' looks like a misspelling of 'CONCURRENCY',
# but strMainPy reads the exact same key, so it must stay as-is.
strConfig=r'''
%(name)s_conf={
	'TASK_NAME':'%(name)s',
	'COCURRENCY':1,
	'DATA_DIR':'',
	'PROCESSORS':['app.%(name)s.processor',],
}
'''
# Template for app/<name>/processor.py: a stub Processor subclassing
# anole's ContentP, written verbatim (no %-substitution) by start_app().
strProcessor=r'''
from anole.core.processor import ContentP,HeadP
class Processor(ContentP):
	_CONTENT_TYPES=[]
	def __init__(self,pm):
		pass
	def process_content(self,head,content,req):
		pass
'''
# Template for app/<name>/frontier.py: a stub UrlSupplier, written
# verbatim (no %-substitution) by start_app(); strMainPy imports it.
strFrontier=r'''
class UrlSupplier:
	def __init__(self,config):
		pass
	def get_url(self):
		pass
'''
def start_app(name):
	"""Create an app skeleton inside the current project.

	Must be run from the project root created by start_project():
	adds app/<name>/ with __init__.py, processor.py and frontier.py,
	appends the app's config dict to config.py, and writes the
	<name>.py entry-point script.  Restores the working directory to
	the project root before returning.
	"""
	# app/<name> package with processor and frontier stubs
	os.mkdir('app/' + name)
	os.chdir('app/' + name)
	with open('__init__.py', 'w'):
		pass
	# text mode ('w'/'a') rather than 'wb'/'ab': the templates are str,
	# and writing str to a binary-mode file fails under Python 3.
	with open('processor.py', 'w') as f:
		f.write(strProcessor)
	with open('frontier.py', 'w') as f:
		f.write(strFrontier)

	# back to the project root: append this app's config section
	os.chdir('../../')
	with open('config.py', 'a') as f:
		f.write(strConfig % {'name': name})

	# entry-point script for running this app
	with open(name + '.py', 'w') as f:
		f.write(strMainPy % {'name': name})

if __name__ == '__main__':
	import sys
	# Guard against missing arguments: the original raised IndexError
	# on bare invocation and silently did nothing on unknown commands.
	if len(sys.argv) < 3:
		sys.stderr.write(
			'usage: %s startproject|startapp <name>\n' % sys.argv[0])
		sys.exit(1)
	cmd = sys.argv[1]
	if cmd == 'startproject':
		start_project(sys.argv[2])
	elif cmd == 'startapp':
		start_app(sys.argv[2])
	else:
		sys.stderr.write('unknown command: %s\n' % cmd)
		sys.exit(1)
