# -*- coding: utf-8 -*-
from __future__ import division
import boto
import boto.s3.connection
import functions.fun as F
#import threading
from multiprocessing import Pool

import os
from time import sleep,time

# S3 connection shared by the benchmark workers (Ceph RGW endpoint, plain HTTP,
# path-style addressing via OrdinaryCallingFormat).
# SECURITY: credentials were hard-coded in source. They can now be supplied via
# the standard AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment variables;
# the original literals remain only as backward-compatible fallbacks and should
# be rotated and removed from version control.
conn = boto.connect_s3(
            aws_access_key_id = os.environ.get('AWS_ACCESS_KEY_ID',
                                               '4C3Z7YH7CCN4MV7MZBUM'),
            aws_secret_access_key = os.environ.get('AWS_SECRET_ACCESS_KEY',
                                                   'ASuvrSgfIearbU2VnYnNleecgLstRBHw1B8lE7QZ'),
            host = 's3.ceph.com',is_secure=False,
            calling_format = boto.s3.connection.OrdinaryCallingFormat())

if __name__ == '__main__':
	print '######## Init Data '
	bucketname = 'speedtest'
	listfile = os.listdir('./tmp') 
	size = F.getdirsize('./tmp')
	print '######## Start to Run'
	#########Processes###########
	start = time()
	rs = []
	pro_num = 50
	p = Pool(processes=pro_num)
	for f in listfile:	
	    res = p.apply_async(F.DownloadFile,args=(conn,'speedtest','tmp/%s'%f,))
	    rs.append(res)
	p.close()
	p.join()
	end = time()
	#########Results#########
	print '######## Dealing Result'
	TT = end-start
	dt = []
	ms = []
	for r in rs:
		dt.append(r.get()[1])
		ms.append(r.get()[2])
	avgtime = sum(dt)/len(dt)
	TM = len(ms)
	requests =TM/TT 
	
	print '####### Print Result'
        print '''
		TotalNum:%d Processes:%d 
		TotalTime:%d 
		AvgTime:%f  Requests/s:%f DiskIO/sec: %f MB/sec 
		MaxTime:%f  MinTime:%f 
		Success:%d  Mission:%d   Result:%d%%
		'''%(TM,pro_num,TT,avgtime,requests,size/TT,max(dt),min(dt),ms.count(0),ms.count(1),ms.count(0)/TM*100)
	print '		',F.lgroupby(dt)

