/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.allyes.flume.serializer;

import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.apache.avro.AvroRuntimeException;
import org.apache.avro.Schema;
import org.apache.avro.file.CodecFactory;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.io.DatumWriter;
import org.apache.flume.Context;
import org.apache.flume.Event;
import org.apache.flume.conf.Configurable;
import org.apache.flume.serialization.EventSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;



import static org.apache.flume.serialization.AvroEventSerializerConfigurationConstants.*;

/**
 * <p>
 * This class serializes Flume {@linkplain org.apache.flume.Event events} into Avro data files. The
 * Flume event body is read as an Avro datum, and is then written to the
 * {@link org.apache.flume.serialization.EventSerializer}'s output stream in Avro data file format.
 * </p>
 * <p>
 * The Avro schema is selected via the {@code schema} context property, which must be one of
 * {@code ainsight} (the default), {@code aim}, or {@code adx}; each maps to a generated
 * {@code RawLog} class whose schema is used for the output data file.
 * </p>
 */
public class RawLogEventSerializer implements EventSerializer, Configurable {
  /** Context property selecting which RawLog schema to serialize with. */
  public static final String SCHEMA = "schema";
  /** Valid values for the {@value #SCHEMA} property. */
  public static final String SCHEMA_AINSIGHT = "ainsight";
  public static final String SCHEMA_AIM = "aim";
  public static final String SCHEMA_ADX = "adx";
  /** Schema used when the {@value #SCHEMA} property is not set. */
  public static final String DEFAULT_SCHEMA = SCHEMA_AINSIGHT;

  private static final Logger logger =
      LoggerFactory.getLogger(RawLogEventSerializer.class);

  private final OutputStream out;
  private DatumWriter<Object> writer = null;
  private DataFileWriter<Object> dataFileWriter = null;

  private int syncIntervalBytes;
  private String compressionCodec;
  private Schema schema;

  private RawLogEventSerializer(OutputStream out) {
    this.out = out;
  }

  /**
   * Reads the sync interval, compression codec, and schema name from the
   * Flume context.
   *
   * @throws IllegalArgumentException if the {@value #SCHEMA} property is not
   *     one of {@value #SCHEMA_AINSIGHT}, {@value #SCHEMA_AIM}, or
   *     {@value #SCHEMA_ADX}
   */
  @Override
  public void configure(Context context) {
    syncIntervalBytes =
        context.getInteger(SYNC_INTERVAL_BYTES, DEFAULT_SYNC_INTERVAL_BYTES);

    compressionCodec =
        context.getString(COMPRESSION_CODEC, DEFAULT_COMPRESSION_CODEC);

    // Resolve the schema name to one of the generated RawLog schemas.
    String schemaName = context.getString(SCHEMA, DEFAULT_SCHEMA);
    switch (schemaName) {
      case SCHEMA_AINSIGHT:
        schema = com.allyes.log.RawLog.getClassSchema();
        break;
      case SCHEMA_AIM:
        schema = com.allyes.aim.log.RawLog.getClassSchema();
        break;
      case SCHEMA_ADX:
        schema = com.allyes.adx.log.RawLog.getClassSchema();
        break;
      default:
        // Fail fast on misconfiguration; listing the valid values makes the
        // error actionable. (IllegalArgumentException is still a
        // RuntimeException, so existing callers are unaffected.)
        throw new IllegalArgumentException("Invalid schema: " + schemaName
            + ". Valid values are: " + SCHEMA_AINSIGHT + ", " + SCHEMA_AIM
            + ", " + SCHEMA_ADX);
    }
  }

  @Override
  public void afterCreate() throws IOException {
    // No-op: the Avro data file header is written lazily on the first write().
  }

  @Override
  public void afterReopen() throws IOException {
    // Impossible to initialize DataFileWriter without re-writing the schema
    // header, so appending to an existing file is not supported.
    throw new UnsupportedOperationException("RawLog Avro API doesn't support append");
  }

  /**
   * Appends one event to the Avro data file, lazily creating the file (and
   * writing its schema header) on the first call.
   *
   * <p>NOTE(review): the event body is passed to
   * {@link DataFileWriter#appendEncoded}, so it is assumed to already be an
   * Avro-binary-encoded datum matching the configured schema — the bytes are
   * not re-validated here.
   */
  @Override
  public void write(Event event) throws IOException {
    if (dataFileWriter == null) {
      initialize();
    }
    dataFileWriter.appendEncoded(ByteBuffer.wrap(event.getBody()));
  }

  /**
   * Creates the {@link DataFileWriter} over the output stream, applying the
   * configured sync interval and (best-effort) compression codec.
   */
  private void initialize() throws IOException {
    writer = new GenericDatumWriter<Object>(schema);
    dataFileWriter = new DataFileWriter<Object>(writer);
    dataFileWriter.setSyncInterval(syncIntervalBytes);

    try {
      CodecFactory codecFactory = CodecFactory.fromString(compressionCodec);
      dataFileWriter.setCodec(codecFactory);
    } catch (AvroRuntimeException e) {
      // Deliberately best-effort: an unknown codec disables compression
      // rather than failing the sink.
      logger.warn("Unable to instantiate avro codec with name (" +
          compressionCodec + "). Compression disabled. Exception follows.", e);
    }

    dataFileWriter.create(schema, out);
  }

  @Override
  public void flush() throws IOException {
    // Guard against flush() before the first write(), when the data file
    // writer has not been created yet.
    if (dataFileWriter != null) {
      dataFileWriter.flush();
    }
  }

  @Override
  public void beforeClose() throws IOException {
    // No-op: flushing is handled by flush(); the stream is closed by Flume.
  }

  @Override
  public boolean supportsReopen() {
    return false;
  }

  /** Builder used by Flume to construct and configure the serializer. */
  public static class Builder implements EventSerializer.Builder {
    @Override
    public EventSerializer build(Context context, OutputStream out) {
      RawLogEventSerializer writer = new RawLogEventSerializer(out);
      writer.configure(context);
      return writer;
    }
  }

}
