#!/usr/bin/env python
# coding:utf8


# FTP endpoint used to upload job artifacts.
# NOTE(review): credentials are hard-coded in source — consider moving them
# to environment variables or a secrets store.
ftp_uri = '202.108.212.209'
ftp_port = '22922'  # kept as a string; presumably callers expect str — TODO confirm
ftp_user = 'KnownsecUpload'
ftp_pw = 'CBE0DE18042688C6D2E68731E1258A11'

# Name of the local file that stores the application id returned by registration.
app_id_filename = 'app_id'

# Web-service (SOAP/WSDL) endpoints.
ws_uri = 'http://192.168.28.220/jobtracker?wsdl'  # jobtracker WSDL endpoint
# The four task endpoints below are intentionally blank here;
# presumably they are filled in at deploy time — verify against the tools code.
task_ws_uri = ''
task_imp_uri = ''
task_imp_location = ''
task_imp_filter_uri = ''

# task app stuff
## xml stuff
_rs_dict = {
	'cpu_hz': '2400',
	'cpu_limit': '90',
	'memory': '2048',
	'disk_free': '4096',
	'os': 'ubuntu',
}
resource_require_xml = '''<?xml version="1.0" encoding="utf-8"?>
<require>
	<cpuMHz attribute="GT">%(cpu_hz)s</cpuMHz>
    <cpuUsed attribute="LT">%(cpu_limit)s</cpuUsed>
	<memoryFreeMb attribute="GT">%(memory)s</memoryFreeMb>
<diskFreeMb>%(disk_free)s</diskFreeMb>
<os attribute="or">%(os)s</os>
</require>
''' % _rs_dict
resource_require_xml = '''<?xml version="1.0" encoding="utf-8"?>
<require>
<os>%(os)s</os>
</require>
''' % _rs_dict

# Application package payload nested inside the registration request.
# The empty fields are placeholders; the tools code fills in the base64
# content and md5 checksums before the request is sent.
_app_package = {
	'appContent': '',	# app file or dir, base64-encoded
	'appFileName': 'crawler.tgz',
	'appFileMd5Value': '',	# md5 of the file above
	'scriptContent': '',	# base64-encoded
	'scriptFileName': 'job.py',
	'scriptMd5Value': '',
	'resourceRequire': resource_require_xml,	# requirement xml
}

# Registration request for the crawler application.
app_reg_dict = {
	'appName': 'crawler',
	'appDisplayName': u'爬虫测试环境',
	'username': 'wang_an',	# not used by the service yet
	'password': 'wang_an',	# not used by the service yet
	'apps': _app_package,
}

# job receive stuff

## parameter xml stuff

# Job-submission payload; 'appId' is filled in with the id returned
# by application registration before the job is submitted.
app_rec_dict = dict(
	appId='',	# registration-returned value, to be updated
	userJobId='shanghai_crawler',	# caller-chosen job name
	parameter='',	# xml
	resultAddress='',	# url
	jobPriority=0,	# 0: vip, otherwise 1-10
	copyNumber=20,	# no limit? currently 10 ubuntu hosts
	budgetRunTime=120,	# seconds
	username='wang_an',
	password='wang_an',
	resourceProvince=2,	# int, 1: beijing; usable now: 2: shanghai, 29: qinghai
	ispName=-1,	# first int: 1 dianxin, 2 liantong, 3 yidong, 4 jiaoyuwang, -1 not set
				# second int: 1 share, 2 dynamic, 3 alone
	Bandwidth='GT|5',	# in M, biggest 100M?
)
