package com.hugedata.idc.etl;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.http.HttpEntity;
import org.apache.http.HttpException;
import org.apache.http.HttpResponse;
import org.apache.http.HttpStatus;
import org.apache.http.StatusLine;
import org.apache.http.client.HttpClient;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.util.EntityUtils;
import org.apache.log4j.Logger;

import com.hugedata.idc.utils.HDFSUtils;

public class HTTP2HDFS
{
    private static final Logger LOGGER = Logger.getLogger(HTTP2HDFS.class);

    /** Prefix that marks a directory link row in the file server's HTML listing. */
    private static final String DIR_PATTERN = "<a href=\"/MFileServer/servlet/fs/";

    /**
     * Entry point: mirrors qos/traceroute archive files from an HTTP file
     * server directory listing into an HDFS directory.
     *
     * @param args args[0] = base file-server URL (day-level directory),
     *             args[1] = target HDFS directory
     * @throws IllegalArgumentException if fewer than two arguments are supplied
     */
    public static void main(String[] args) throws IOException, HttpException, IllegalAccessException, InstantiationException, InvocationTargetException, NoSuchMethodException
    {
        // Fail fast with a readable message instead of an
        // ArrayIndexOutOfBoundsException on missing arguments.
        if (args == null || args.length < 2)
        {
            throw new IllegalArgumentException("usage: HTTP2HDFS <fsUrl> <hdfsPath>");
        }

        String fsUrl = args[0];
        String hdfsPath = args[1];

        LOGGER.info("fsUrl: " + fsUrl);
        LOGGER.info("hdfsPath: " + hdfsPath);

        List<DownFileInfo> downUrlList = new ArrayList<DownFileInfo>(100);
        List<String> dirList = retrieveHourDirNames(fsUrl);
        for (String dirName : dirList)
        {
            String tempUrl = fsUrl + "/" + dirName + "/";
            List<String> fileNameList = retrieveFileNames(tempUrl);
            for (String tempFileName : fileNameList)
            {
                // Plain constructor replaces the former reflective
                // BeanUtils.cloneBean of an empty, never-mutated prototype:
                // same result, no reflection cost.
                DownFileInfo tempBean = new DownFileInfo();
                tempBean.setFileName(tempFileName);
                tempBean.setDownUrl(tempUrl + tempFileName);
                downUrlList.add(tempBean);
            }
        }

        download(downUrlList, hdfsPath);
    }

    /**
     * Copies every listed file from the HTTP file server into the given HDFS
     * directory.
     *
     * @param downUrlList files to fetch (download URL + target file name each)
     * @param hdfsPath    HDFS directory the files are written into
     * @throws IOException   on HTTP transport or HDFS write failure
     * @throws HttpException when the server answers any request with a non-200 status
     */
    public static void download(List<DownFileInfo> downUrlList, String hdfsPath) throws IOException, HttpException
    {
        HttpClient client = new DefaultHttpClient();
        Configuration config = HDFSUtils.getConf();
        // Acquire the FileSystem once for the whole batch; the original
        // re-acquired and closed it on every single file, which defeats the
        // Hadoop FS cache and risks invalidating the shared instance.
        FileSystem fs = null;
        try
        {
            fs = FileSystem.get(config);
            for (DownFileInfo tempInfo : downUrlList)
            {
                HttpGet httpget = new HttpGet(tempInfo.getDownUrl());
                HttpResponse response = client.execute(httpget);
                StatusLine statusLine = response.getStatusLine();
                if (statusLine.getStatusCode() != HttpStatus.SC_OK)
                {
                    // Abort so the pooled connection is released before we throw;
                    // the original leaked it.
                    httpget.abort();
                    throw new HttpException("error code:" + statusLine.getStatusCode());
                }
                InputStream is = null;
                OutputStream output = null;
                try
                {
                    is = response.getEntity().getContent();
                    String fileName = tempInfo.getFileName();
                    output = fs.create(new Path(hdfsPath + "/" + fileName));
                    int bytes = IOUtils.copy(is, output);
                    LOGGER.info("copy file " + fileName + ",size " + bytes);
                }
                finally
                {
                    IOUtils.closeQuietly(output);
                    IOUtils.closeQuietly(is);
                }
            }
        }
        finally
        {
            IOUtils.closeQuietly(fs);
        }
    }

    /**
     * Scrapes the hour sub-directory names (e.g. "00".."23") out of the
     * server's HTML directory listing.
     *
     * @param url day-level directory URL
     * @return hour directory names, in page order
     * @throws IOException   on HTTP transport failure
     * @throws HttpException when the server answers with a non-200 status
     */
    public static List<String> retrieveHourDirNames(String url) throws IOException, HttpException
    {
        List<String> fileNameList = new ArrayList<String>(10);
        String webContent = fetchPage(url, "retrieveHourDirNames");
        String[] strArr = StringUtils.split(webContent, "\n");
        for (String str : strArr)
        {
            String tempStr = StringUtils.trimToEmpty(str);
            // Directory rows look like:
            // <a href="/MFileServer/servlet/fs/20150421/23"><img alt="Directory" ...
            if (StringUtils.isNotBlank(tempStr)
                    && StringUtils.startsWith(tempStr, DIR_PATTERN))
            {
                // The two-character hour token sits 9 characters past the
                // pattern prefix (8-digit date plus the separating slash).
                int prevIdx = DIR_PATTERN.length();
                String tempFileName = StringUtils.substring(tempStr, prevIdx + 9, prevIdx + 11);
                fileNameList.add(StringUtils.trim(tempFileName));
            }
        }
        return fileNameList;
    }

    /**
     * Scrapes the downloadable archive file names out of an hour directory's
     * HTML listing. Only qos/traceroute tar.bz / tar.gz archives are kept.
     *
     * @param url hour-level directory URL
     * @return matching file names, in page order
     * @throws IOException   on HTTP transport failure
     * @throws HttpException when the server answers with a non-200 status
     */
    public static List<String> retrieveFileNames(String url) throws IOException, HttpException
    {
        List<String> fileNameList = new ArrayList<String>(10);
        String webContent = fetchPage(url, "retrieveFileNames");
        String[] strArr = StringUtils.split(webContent, "\n");
        for (String str : strArr)
        {
            // File rows look like:
            // "   11407_1201-11-1003_crawl_20140707190012_crawl-20140707190000.tar.bz</a>"
            if (StringUtils.isNotBlank(str)
                    && (StringUtils.contains(str, ".tar.bz</a>")
                            || StringUtils.contains(str, ".tar.gz</a>"))
                    && (StringUtils.contains(str, "_qos_")
                            || StringUtils.contains(str, "_traceroute_")))
            {
                // Everything before the closing anchor tag is the file name.
                int endIdx = StringUtils.indexOf(str, "</a>");
                String tempFileName = StringUtils.substring(str, 0, endIdx);
                fileNameList.add(StringUtils.trim(tempFileName));
            }
        }
        return fileNameList;
    }

    /**
     * Performs an HTTP GET and returns the response body as a string.
     * Shared by both retrieve* methods, which previously duplicated this
     * boilerplate.
     *
     * @param url    page to fetch
     * @param caller calling method's name, kept in the error message so the
     *               original messages are preserved byte-for-byte
     * @return the page content
     * @throws IOException   on transport failure
     * @throws HttpException when the server answers with a non-200 status
     */
    private static String fetchPage(String url, String caller) throws IOException, HttpException
    {
        HttpClient client = new DefaultHttpClient();
        HttpGet httpGet = new HttpGet(url);
        HttpResponse httpResponse = client.execute(httpGet);
        int statusCode = httpResponse.getStatusLine().getStatusCode();
        if (statusCode != HttpStatus.SC_OK)
        {
            // Release the pooled connection before surfacing the failure.
            httpGet.abort();
            throw new HttpException("failed " + caller + " from url:" + url + " failure");
        }
        return EntityUtils.toString(httpResponse.getEntity());
    }

}
