package com.ideal.hadoopadmin.crontab.hive;

import com.ideal.hadoopadmin.crontab.db.ConnectionManager;

import java.sql.Connection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;

/**
 * Loads metadata for every Hive table by querying the metastore tables
 * (DBS / TBLS / SDS) directly over JDBC.
 *
 * <p>Usage pattern (as seen from this class): construct with a shared
 * {@link StringBuffer}, call {@link #invoke()}, then read the results via the
 * getters. The accumulator collections ({@code nameset}, {@code hdfsPathset},
 * {@code names}, {@code hdfsPaths}) are created empty by {@code invoke()} and
 * are presumably populated by the caller — confirm against call sites.
 *
 * <p>NOTE(review): this class is not thread-safe; it holds mutable state that
 * {@code invoke()} rewrites on every call.
 */
public class LoadAllHiveMeta {
    // Shared SQL buffer supplied by the caller; invoke() appends the query to
    // it and clears it (setLength(0)) once the query has been executed.
    private StringBuffer hql;
    // Rows returned by the metastore query, one Map per row.
    private List<Map<String, Object>> rsList;
    // Empty accumulators initialised by invoke(); element types are chosen by
    // callers (raw HashSet kept to preserve the existing public interface).
    private HashSet nameset;
    private HashSet hdfsPathset;
    private StringBuffer names;
    private StringBuffer hdfsPaths;

    /**
     * Creates a loader bound to the given shared query buffer.
     *
     * @param hql buffer the metastore query is appended to; cleared by
     *            {@link #invoke()} after execution
     */
    public LoadAllHiveMeta(StringBuffer hql) {
        this.hql = hql;
    }

    /** @return rows from the metastore query, or {@code null} before {@link #invoke()} */
    public List<Map<String, Object>> getRsList() {
        return rsList;
    }

    /** @return empty set created by {@link #invoke()}; filled by callers */
    public HashSet getNameset() {
        return nameset;
    }

    /** @return empty set created by {@link #invoke()}; filled by callers */
    public HashSet getHdfsPathset() {
        return hdfsPathset;
    }

    /** @return empty buffer created by {@link #invoke()}; filled by callers */
    public StringBuffer getNames() {
        return names;
    }

    /** @return empty buffer created by {@link #invoke()}; filled by callers */
    public StringBuffer getHdfsPaths() {
        return hdfsPaths;
    }

    /**
     * Runs the all-tables metastore query (join of DBS, TBLS and SDS), stores
     * the result rows in {@code rsList}, resets the shared buffer, and
     * initialises the empty accumulator collections.
     *
     * @return this instance, for chaining
     */
    public LoadAllHiveMeta invoke() {
        hql.append(" select dbs.name,tbls.tbl_name,sds.location,tbls.create_time,dbs.db_id,");
        hql.append(" tbls.tbl_id,tbls.sd_id,tbls.tbl_type, sds.serde_id,sds.input_format,");
        hql.append(" sds.output_format,sds.cd_id from dbs,tbls,sds where dbs.db_id=tbls.db_id");
        hql.append(" and tbls.sd_id=sds.sd_id");
        Connection conn = ConnectionManager.getHiveConnection();
        try {
            rsList = ConnectionManager.queryDB(conn, hql.toString());
        } finally {
            // Fix: the connection was previously never closed (leaked on every
            // call). Closing is correct for plain connections; for pooled ones
            // close() returns the connection to the pool (JDBC contract).
            if (conn != null) {
                try {
                    conn.close();
                } catch (java.sql.SQLException ignored) {
                    // best-effort close — the query result is already captured
                }
            }
        }
        hql.setLength(0);
        nameset = new HashSet();
        hdfsPathset = new HashSet();
        names = new StringBuffer();
        hdfsPaths = new StringBuffer();
        return this;
    }
}
