package com.chinapex.data.pipeline;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.rmi.server.UID;
import java.time.ZoneId;
import java.util.Arrays;
import java.util.Base64;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;

import com.github.luben.zstd.Zstd;
import com.google.common.base.Charsets;
import org.apache.commons.lang3.StringUtils;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobConfigurable;
import org.apache.hadoop.mapred.LineRecordReader;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;

import org.apache.hadoop.hive.common.FileUtils;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.PartitionDesc;
import org.apache.hadoop.hive.serde2.avro.AvroGenericRecordWritable;
import org.apache.hadoop.hive.serde2.avro.AvroSerdeException;
import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils.AvroTableProperties;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;

/**
 * InputFormat for line-delimited text files in which every line is a Base64
 * string holding a zstd-compressed Avro binary encoding of an ARRAY of
 * records. The reader inflates one line at a time and emits each element of
 * the array as an {@link AvroGenericRecordWritable} for Hive's Avro SerDe.
 */
public class AvroFragInputFormat
        extends FileInputFormat<NullWritable, AvroGenericRecordWritable> implements JobConfigurable {
    protected JobConf jobConf;

    /**
     * Record reader that decodes one line (Base64 -> zstd -> Avro array) at a
     * time and iterates over the decoded records.
     */
    static class AvroFragRecordReader
            implements RecordReader<NullWritable, AvroGenericRecordWritable>, JobConfigurable {
        protected JobConf jobConf;

        private final LineRecordReader lineReader;
        private final GenericDatumReader<GenericRecord> gdr;
        // Identifies this reader instance; Hive uses it to detect reader switches.
        private final UID recordReaderID;
        private final ZoneId writerTimezone;
        protected Schema _schema;

        // Records decoded from the current line that have not been emitted yet;
        // null until the first line is read.
        private Iterator<GenericData.Record> _recordIterator;

        public AvroFragRecordReader(JobConf job, FileSplit split, Reporter reporter) throws IOException {
            this.lineReader = new LineRecordReader(job, split, "\n".getBytes(Charsets.UTF_8));
            this.recordReaderID = new UID();
            // NOTE(review): writer timezone defaults to the JVM zone — confirm this
            // matches how the data was produced.
            this.writerTimezone = TimeZone.getDefault().toZoneId();
            this.gdr = new GenericDatumReader<GenericRecord>();

            try {
                _schema = getSchema(job, split);
            } catch (AvroSerdeException e) {
                throw new IOException(e);
            }
            if (_schema != null) {
                // Each line carries an ARRAY of records, so the datum reader must
                // expect array-of-(table schema), not the record schema itself.
                Schema arraySchema = Schema.createArray(_schema);
                gdr.setExpected(arraySchema);
                gdr.setSchema(arraySchema);
            }
        }

        @Override
        public void configure(JobConf jobConf) {
            this.jobConf = jobConf;
        }

        @Override
        public synchronized void close() throws IOException {
            lineReader.close();
        }

        @Override
        public synchronized long getPos() throws IOException {
            return lineReader.getPos();
        }

        /**
         * Emits the next record: first drains any records left from the current
         * line, otherwise reads and decodes further lines. Lines decoding to an
         * empty array are skipped rather than raising NoSuchElementException.
         *
         * @return true if {@code record} was populated, false at end of split
         */
        @Override
        public synchronized boolean next(NullWritable nullWritable, AvroGenericRecordWritable record)
                throws IOException {
            while (true) {
                if (_recordIterator != null && _recordIterator.hasNext()) {
                    GenericData.Record r = _recordIterator.next();
                    record.setRecord(r);
                    record.setRecordReaderID(recordReaderID);
                    record.setFileSchema(this._schema);
                    return true;
                }
                LongWritable lineKey = new LongWritable();
                Text lineVal = new Text();
                if (!lineReader.next(lineKey, lineVal)) {
                    return false; // end of split
                }

                // BUG FIX: Text.getBytes() returns the whole backing array, which can
                // contain stale bytes past getLength() when the Text is reused; only
                // the valid region may be Base64-decoded.
                byte[] b64 = Arrays.copyOfRange(lineVal.getBytes(), 0, lineVal.getLength());
                byte[] avroZstd = Base64.getDecoder().decode(b64);
                byte[] avro = decompress(avroZstd);
                Decoder binDecoder = DecoderFactory.get().binaryDecoder(avro, null);

                @SuppressWarnings("unchecked")
                GenericData.Array<GenericData.Record> decoded =
                        (GenericData.Array<GenericData.Record>) gdr.read(null, binDecoder);
                _recordIterator = decoded.iterator();
                // Loop: the next pass emits the first element, or reads another line
                // if this array was empty.
            }
        }

        /**
         * Inflates one zstd frame. Prefers the exact decompressed size recorded in
         * the frame header; falls back to a 10x-of-compressed heuristic (computed
         * in long arithmetic so large inputs cannot overflow int) when the header
         * does not carry the content size.
         */
        private static byte[] decompress(byte[] zstd) {
            long size = Zstd.getFrameContentSize(zstd);
            if (size <= 0 || size > Integer.MAX_VALUE) {
                size = Math.min((long) zstd.length * 10, Integer.MAX_VALUE);
            }
            return Zstd.decompress(zstd, (int) size);
        }

        @Override
        public NullWritable createKey() {
            return NullWritable.get();
        }

        @Override
        public AvroGenericRecordWritable createValue() {
            return new AvroGenericRecordWritable(writerTimezone);
        }

        @Override
        public synchronized float getProgress() throws IOException {
            return lineReader.getProgress();
        }

        /**
         * Returns true when {@code split} lies under {@code partitionPath},
         * ignoring scheme/authority when the split path carries none.
         */
        private boolean pathIsInPartition(Path split, Path partitionPath) {
            boolean schemeless = split.toUri().getScheme() == null;
            if (schemeless) {
                Path pathNoSchema = Path.getPathWithoutSchemeAndAuthority(partitionPath);
                return FileUtils.isPathWithinSubtree(split, pathNoSchema);
            } else {
                return FileUtils.isPathWithinSubtree(split, partitionPath);
            }
        }

        /**
         * Resolves the Avro reader schema for this split: from the matching
         * partition's table properties when running inside an MR job, otherwise
         * from the serialized schema stashed in the job conf. Returns null when
         * no schema can be found (records may need re-encoding later).
         */
        private Schema getSchema(JobConf job, FileSplit split) throws AvroSerdeException, IOException {
            // Inside of a MR job, we can pull out the actual properties
            if (AvroSerdeUtils.insideMRJob(job)) {
                MapWork mapWork = Utilities.getMapWork(job);

                // Iterate over the Path -> Partition descriptions to find the partition
                // that matches our input split.
                for (Map.Entry<Path, PartitionDesc> pathsAndParts : mapWork.getPathToPartitionInfo().entrySet()) {
                    Path partitionPath = pathsAndParts.getKey();
                    if (pathIsInPartition(split.getPath(), partitionPath)) {
                        Properties props = pathsAndParts.getValue().getProperties();
                        if (props.containsKey(AvroTableProperties.SCHEMA_LITERAL.getPropName())
                                || props.containsKey(AvroTableProperties.SCHEMA_URL.getPropName())) {
                            return AvroSerdeUtils.determineSchemaOrThrowException(job, props);
                        } else {
                            return null; // If it's not in this property, it won't be in any others
                        }
                    }
                }
            }

            // In "select * from table" situations (non-MR), we can add things to the job
            // It's safe to add this to the job since it's not *actually* a mapred job.
            // Here the global state is confined to just this process.
            String s = job.get(AvroTableProperties.AVRO_SERDE_SCHEMA.getPropName());
            if (StringUtils.isNotBlank(s)) {
                return AvroSerdeUtils.getSchemaFor(s);
            }
            // No more places to get the schema from. Give up.  May have to re-encode later.
            return null;
        }
    }

    @Override
    public RecordReader<NullWritable, AvroGenericRecordWritable> getRecordReader(
            InputSplit inputSplit, JobConf jc, Reporter reporter) throws IOException {
        return new AvroFragRecordReader(jc, (FileSplit) inputSplit, reporter);
    }

    @Override
    public void configure(JobConf jobConf) {
        this.jobConf = jobConf;
    }

    /** Manual smoke test: decodes one captured sample line and prints the raw Avro bytes. */
    public static void main(String[] args) {
        String b64 = "KLUv/QRYRR8AZvvaS9DsqgAAXzzVSvKT8wBfPEC7Oq8cVx5GIG3EiCOHF0izVElYGcqmGhF8kiWj2cRxYrsiQEAJJY1O3WNrF9mUrz+5FPcSTybXyGlSsMAAyADKAOJJpmlpnuYOUb0ui9WyMsxTu5IsKhXPElUa2xNaYs3WLKGnuZZOZrvudpcNKJTQ1UaQEFnUuMV2RY0oE1WqoimeTuQpluxJVdW2VU3SPM2NUkFOtNwKOEoOBR0l6SF2mBtourEgPTs8UJRj4yzFbXKI+zRNEHLd40imJxZd1VJarmlpZXdZWldxLammuLe4oqqPOH4V348vviCPbJbFXEjmLj2uw9E7Mt8vvqANy/vgXqg2x8Dv8v0ORrtiDkAlQLfHzT7m10kpOt0H9iw8uqXDF/3QOtnLPX3Fpv7d/Mm6rpjzZl5m9rGfR4Bi4DHCjqgirAgGNCVfFAstdkRETbU0xg4QE6uuTK3qTa1G74l7QsBNJ7wn7omBGviHYGgenBkn+9ltNmkrqCr1QQbVm1q1JG2El63fFHEaPOfIXQbRdTN9hfBhYt3XhFVtmFivi0AMd7gpfK7n2cy92+tkz6M93HsXXYeBHgfkP0WfwOaqZ1Lo381/iPloRvuVl1+tVIsdaoSbQhzPdU2uJo2cXJJTSEvVloS0JW8KY2LAhExKrRPPvx/tEnMQkA2MiQmXmMv8uBxzV8xz5E4S8wVftk6DzTkzvQY2D8Ff8/HdynuIOcybYb7gEGzWzXQAryH4SRY7FMPU5guq5P7FF9zdZliss15X/RA3C+VlVa6EjFzvfz+f670Gn5qG6OGgH+3AzwMzO/CAQFQzx2K+2/WXrXOwqW2uGgfkLUDa5qojsNA8BguPbtGZZTEPJPoRL2XGATkLre/IXGfzsB4aFjFnuHAeKBcxXzCj5L3Aap0KCADS1EK1mQNOLQJXk9oK6ioEpXVPLAJMAGHe1Ht5A6xbwboVQj//u1GJHybnR0A9fUb7n8xTbn1UiulInuEe9wW603KP4SYtTnEM0TI0z3Dc+wDr6mQvh5g3BL0rRmLdl60XFdG5HjrFaq0yUqzW6gLVOM2SHHeISctqHQhRq4Vq88vWbz8+k5lxu1kWcznXcYbo02XDy0H9JveYXx+FqNW5HppkpgS07HsykZCoFRRyyxOpdJrZ32BaBC4tIEACA04N7QGyDhPApICvW30yzd71EWeCJq9MCrR4aTD1E4p2grywedMxqsdlKAGiQ74uSof6igd0vDG83Xyz3Utb0GKxYKzMsEwWhRH+qS/JNU7No7Ru6BIj+uW+XVgPYwCVeRdzA5J/px1vMgZAYECc8zE/iuQQoMr4J08=";
        // Base64.Decoder accepts the String directly (the Base64 alphabet is
        // ASCII), avoiding a platform-charset-dependent getBytes() call.
        byte[] avroZstd = Base64.getDecoder().decode(b64);
        byte[] avro = Zstd.decompress(avroZstd, avroZstd.length * 10);
        System.out.println(new String(avro, StandardCharsets.UTF_8));
    }

}
