package com.xrui.hbase.tools;

import com.google.common.base.Preconditions;
import com.moz.kiji.common.flags.Flag;

import com.univocity.parsers.csv.CsvWriter;
import com.univocity.parsers.csv.CsvWriterSettings;
import com.xrui.hbase.*;
import com.xrui.hbase.conversion.CellConverter;
import com.xrui.hbase.model.RecordFields;
import com.xrui.hbase.schema.TableLayout;
import com.xrui.hbase.util.ResourceUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Command-line tool to dump specific rows from an HBase table.
 * <p>
 * Example: dumps the rows matching the row-key components 'row_comp1=abc' and
 * 'row_comp2=123' from column family 'cf' of table 'table_foo', writing the
 * result as CSV to a local file:
 * <pre>
 *   hbase dump hbase://.env/default/table_foo/cf/row_comp1=abc&amp;row_comp2=123 \
 *       --out-file=/path/to/local/disk/hbase-dump.csv
 * </pre>
 */
public final class DumpTool extends BaseTool {
    private static final Logger LOG = LoggerFactory.getLogger(DumpTool.class);

    /**
     * Matches a timestamp interval of the form "min..max" where either endpoint may be
     * omitted, e.g. "123..1234", "0..", or "..1234". Compiled once instead of per run.
     */
    private static final Pattern TIMESTAMP_PATTERN = Pattern.compile("([0-9]*)\\.\\.([0-9]*)");

    @Flag(name = "out-file", usage = "Output file path to dump.\n")
    private String mOutputFileFlag = null;

    @Flag(name = "max-versions", usage = "Max number of versions per cell to display")
    private int mMaxVersions = 1;

    @Flag(name = "timestamp", usage = "Min..Max timestamp interval to display,\n"
        + "\twhere Min and Max represent long-type time in milliseconds since the UNIX Epoch.\n"
        + "\tE.g. '--timestamp=123..1234', '--timestamp=0..', or '--timestamp=..1234'.")
    private String mTimestamp = "0..";

    /** Minimum timestamp (inclusive) parsed from {@link #mTimestamp} by {@link #parseTimestampInterval()}. */
    private long mMinTimestamp;

    /** Maximum timestamp parsed from {@link #mTimestamp} by {@link #parseTimestampInterval()}. */
    private long mMaxTimestamp;

    /**
     * Program entry point.
     *
     * @param args The command-line arguments.
     * @throws Exception If there is an error.
     */
    public static void main(String[] args) throws Exception {
        System.exit(new HBaseToolLauncher().run(new DumpTool(), args));
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getName() {
        return "dump";
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getDescription() {
        return "Dump specific rows from a hbase table.";
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getCategory() {
        return "Data";
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public String getUsageString() {
        return
            "Usage:\n"
                + "    hbase dump [flags...] (<hbase-uri>)\n"
                + "\n"
                + "Example:\n"
                + "    hbase dump hbase://.env/default/my_table/my_column_family/comp_a=abc&comp_b=123 \\\n"
                + "        --out-file=\"the file path\"\n"
                + "        --max-versions=2\n";
    }

    /**
     * {@inheritDoc}
     */
    @Override
    protected int run(List<String> nonFlagArgs) throws Exception {
        // Exactly one positional argument (the table URI) is expected.
        if (nonFlagArgs.isEmpty()) {
            getPrintStream().printf("URI must be specified as an argument%n");
            return FAILURE;
        } else if (nonFlagArgs.size() > 1) {
            getPrintStream().printf("Too many arguments: %s%n", nonFlagArgs);
            return FAILURE;
        }

        final HBaseURI argURI = HBaseURI.ofValue(nonFlagArgs.get(0));
        if (!validateUri(argURI) || !validateFlags() || !parseTimestampInterval()) {
            return FAILURE;
        }
        return dump(argURI);
    }

    /**
     * Checks that the URI carries all components the dump requires.
     *
     * @param argURI The parsed table URI.
     * @return true if the URI has a quorum, instance, table, and parameters; false otherwise
     *     (an error message has been printed).
     */
    private boolean validateUri(HBaseURI argURI) {
        if ((null == argURI.getZookeeperQuorum())
            || (null == argURI.getInstance())
            || (null == argURI.getTable())
            || (null == argURI.getParameters())) {
            getPrintStream().printf("Specify a cluster, instance, and "
                + "table with argument hbase://zkhost/instance/table/cf/comp1=abc&comp2=123%n");
            return false;
        }
        return true;
    }

    /**
     * Validates the non-timestamp command-line flags.
     *
     * @return true if the flags are usable; false otherwise (an error message has been printed).
     */
    private boolean validateFlags() {
        if (mMaxVersions < 1) {
            getPrintStream().printf("--max-versions must be positive, got %d%n", mMaxVersions);
            return false;
        }
        if (mOutputFileFlag == null) {
            // Fixed message: this flag names the output file, not an entity.
            getPrintStream().printf("Specify an output file with --out-file=<path>%n");
            return false;
        }
        return true;
    }

    /**
     * Parses {@link #mTimestamp} into {@link #mMinTimestamp} and {@link #mMaxTimestamp}.
     * An omitted left endpoint defaults to 0; an omitted right endpoint defaults to
     * {@link Long#MAX_VALUE}.
     *
     * @return true if the interval is well-formed and min does not exceed max; false otherwise
     *     (an error message has been printed).
     */
    private boolean parseTimestampInterval() {
        final Matcher timestampMatcher = TIMESTAMP_PATTERN.matcher(mTimestamp);
        if (!timestampMatcher.matches()) {
            getPrintStream().printf("--timestamp must be like [0-9]*..[0-9]*, instead got %s%n",
                mTimestamp);
            return false;
        }
        final String leftEndpoint = timestampMatcher.group(1);
        final String rightEndpoint = timestampMatcher.group(2);
        mMinTimestamp = leftEndpoint.isEmpty() ? 0 : Long.parseLong(leftEndpoint);
        mMaxTimestamp = rightEndpoint.isEmpty() ? Long.MAX_VALUE : Long.parseLong(rightEndpoint);
        if (mMinTimestamp > mMaxTimestamp) {
            getPrintStream().printf("--timestamp min must not exceed max, got %s%n", mTimestamp);
            return false;
        }
        return true;
    }

    /**
     * Opens the table identified by the URI, scans the matching rows, and writes every cell
     * (converted to record fields) as CSV rows to {@link #mOutputFileFlag}.
     *
     * @param argURI The validated table URI.
     * @return SUCCESS if the dump completed; FAILURE if any error occurred (logged).
     * @throws Exception Never propagated in practice; all failures are caught and logged.
     */
    private int dump(HBaseURI argURI) throws Exception {
        final CsvWriterSettings settings = new CsvWriterSettings();
        settings.setHeaderWritingEnabled(true);

        int returnCode;
        HBase hbase = null;
        HBaseTable table = null;
        TableReader reader = null;
        RowScanner rowScanner = null;
        CsvWriter csvWriter = null;
        try {
            csvWriter = new CsvWriter(new File(mOutputFileFlag), StandardCharsets.UTF_8, settings);
            getPrintStream().println("Connecting to HBase cluster " + argURI.getZookeeperQuorum());
            hbase = HBase.open(argURI, getConf());

            getPrintStream().println("Connecting to HBase table " + argURI.getTable());
            table = hbase.openTable(argURI.getTable());
            reader = table.openTableReader();
            final ColumnName columnName = ColumnName.create(argURI.getColumnNames().get(0).getName());
            DataRequestBuilder.ColumnsDef columnsDef = DataRequestBuilder.ColumnsDef.create()
                .withMaxVersions(mMaxVersions)
                .add(columnName);
            DataRequest dataRequest = DataRequest.builder().addColumns(columnsDef)
                .withTimeRange(mMinTimestamp, mMaxTimestamp)
                .build();

            getPrintStream().println("Resolving HBase table layout...");
            final TableLayout layout = table.getLayout();
            TableReader.ScannerOptions scannerOptions = new TableReader.ScannerOptions();

            if (argURI.getParameters().size() > 0) {
                // Restrict the scan to the rows addressed by the URI's key components.
                ToolUtils.RowFinder rowFinder = ToolUtils.populateRowKey(layout, argURI.getParameters());
                if (rowFinder.getRowKeyBytes() != null) {
                    EntityId rowKey = table.getEntityIdFromRowKey(rowFinder.getRowKeyBytes());
                    scannerOptions.setStartRow(rowKey);
                }
                if (rowFinder.getRowKeyFilter() != null) {
                    scannerOptions.setRowFilter(rowFinder.getRowKeyFilter());
                }

                getPrintStream().println(
                    "Looking up rows with query '" + argURI.getParameters() + "' from hbase table: " + argURI
                );
            } else {
                // dump all rows with full table scan
                getPrintStream().println(
                    "Looking up all rows from hbase table: " + argURI
                );
            }

            final CellConverter converter = Preconditions.checkNotNull(layout.getConverter(columnName));
            rowScanner = reader.getScanner(dataRequest, scannerOptions);
            for (RowData rowData : rowScanner) {
                for (HBaseCell<Object> cell : rowData.asIterable(columnName.getFamily())) {
                    RecordFields recordFields = converter.convert(cell);
                    csvWriter.writeRow(recordFields.toMap());
                }
            }

            getPrintStream().println(
                String.format("%s records dumped to %s", csvWriter.getRecordCount(), mOutputFileFlag)
            );
            returnCode = SUCCESS;
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            returnCode = FAILURE;
        } finally {
            // Close the writer first so buffered rows are flushed before scan resources go away.
            if (null != csvWriter) {
                csvWriter.close();
            }
            ResourceUtils.closeOrLog(rowScanner);
            ResourceUtils.closeOrLog(reader);
            ResourceUtils.releaseOrLog(table);
            ResourceUtils.releaseOrLog(hbase);
        }
        return returnCode;
    }
}
