/*

 * Licensed to the Apache Software Foundation (ASF) under one

 * or more contributor license agreements.  See the NOTICE file

 * distributed with this work for additional information

 * regarding copyright ownership.  The ASF licenses this file

 * to you under the Apache License, Version 2.0 (the

 * "License"); you may not use this file except in compliance

 * with the License.  You may obtain a copy of the License at

 *

 *     http://www.apache.org/licenses/LICENSE-2.0

 *

 * Unless required by applicable law or agreed to in writing, software

 * distributed under the License is distributed on an "AS IS" BASIS,

 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

 * See the License for the specific language governing permissions and

 * limitations under the License.

 */

package com.bff.gaia.unified.runners.gaia;



import com.bff.gaia.unified.runners.core.construction.PTransformReplacements;

import com.bff.gaia.unified.runners.core.construction.PTransformTranslation;

import com.bff.gaia.unified.runners.core.construction.UnconsumedReads;

import com.bff.gaia.unified.runners.core.construction.WriteFilesTranslation;

import com.bff.gaia.unified.sdk.Pipeline;

import com.bff.gaia.unified.sdk.io.FileBasedSink;

import com.bff.gaia.unified.sdk.io.WriteFiles;

import com.bff.gaia.unified.sdk.io.WriteFilesResult;

import com.bff.gaia.unified.sdk.options.PipelineOptions;

import com.bff.gaia.unified.sdk.runners.AppliedPTransform;

import com.bff.gaia.unified.sdk.runners.PTransformOverrideFactory;

import com.bff.gaia.unified.sdk.runners.TransformHierarchy;

import com.bff.gaia.unified.sdk.transforms.PTransform;

import com.bff.gaia.unified.sdk.values.PCollection;

import com.bff.gaia.unified.sdk.values.PCollectionView;

import com.bff.gaia.unified.sdk.values.PValue;

import com.bff.gaia.unified.sdk.values.TupleTag;

import com.bff.gaia.unified.vendor.guava.com.google.common.annotations.VisibleForTesting;

import com.bff.gaia.streaming.api.environment.StreamExecutionEnvironment;

import com.bff.gaia.util.Preconditions;

import org.slf4j.Logger;

import org.slf4j.LoggerFactory;



import java.util.Collections;

import java.util.List;

import java.util.Map;



/**

 * This is a {@link GaiaPipelineTranslator} for streaming jobs. Its role is to translate the

 * user-provided {@link PCollection}-based job into a {@link

 * com.bff.gaia.streaming.api.datastream.DataStream} one.

 */

class GaiaStreamingPipelineTranslator extends GaiaPipelineTranslator {

  private static final Logger LOG = LoggerFactory.getLogger(GaiaStreamingPipelineTranslator.class);

  /** The necessary context in the case of a streaming job. */
  private final GaiaStreamingTranslationContext streamingContext;

  /** Current depth in the transform hierarchy; used only to indent log messages. */
  private int depth = 0;

  /**
   * Creates a translator that will emit operators into the given streaming environment.
   *
   * @param env the target streaming execution environment
   * @param options the pipeline options shared with every translated transform
   */
  public GaiaStreamingPipelineTranslator(StreamExecutionEnvironment env, PipelineOptions options) {
    this.streamingContext = new GaiaStreamingTranslationContext(env, options);
  }

  @Override
  public void translate(Pipeline pipeline) {
    // Ensure all outputs of all reads are consumed before walking the pipeline.
    UnconsumedReads.ensureAllReadsConsumed(pipeline);
    super.translate(pipeline);
  }

  // --------------------------------------------------------------------------------------------
  //  Pipeline Visitor Methods
  // --------------------------------------------------------------------------------------------

  @Override
  public CompositeBehavior enterCompositeTransform(TransformHierarchy.Node node) {
    LOG.info("{} enterCompositeTransform- {}", genSpaces(this.depth), node.getFullName());
    this.depth++;

    PTransform<?, ?> transform = node.getTransform();
    if (transform != null) {
      StreamTransformTranslator<?> translator =
          GaiaStreamingTransformTranslators.getTranslator(transform);

      // If the composite itself has a dedicated translator, translate it as one unit and do
      // not descend into its constituent sub-transforms.
      if (translator != null && applyCanTranslate(transform, node, translator)) {
        applyStreamingTransform(transform, node, translator);
        LOG.info("{} translated- {}", genSpaces(this.depth), node.getFullName());
        return CompositeBehavior.DO_NOT_ENTER_TRANSFORM;
      }
    }
    return CompositeBehavior.ENTER_TRANSFORM;
  }

  @Override
  public void leaveCompositeTransform(TransformHierarchy.Node node) {
    this.depth--;
    LOG.info("{} leaveCompositeTransform- {}", genSpaces(this.depth), node.getFullName());
  }

  @Override
  public void visitPrimitiveTransform(TransformHierarchy.Node node) {
    LOG.info("{} visitPrimitiveTransform- {}", genSpaces(this.depth), node.getFullName());
    // Get the transformation corresponding to the node we are
    // currently visiting and translate it into its Gaia alternative.

    PTransform<?, ?> transform = node.getTransform();
    StreamTransformTranslator<?> translator =
        GaiaStreamingTransformTranslators.getTranslator(transform);

    if (translator == null || !applyCanTranslate(transform, node, translator)) {
      String transformUrn = PTransformTranslation.urnForTransform(transform);
      // Log at WARN with a parameterized message (was a bare info-level log of the urn) so the
      // unsupported urn is visible at an appropriate level before the failure is raised.
      LOG.warn("Unsupported transform urn: {}", transformUrn);
      throw new UnsupportedOperationException(
          "The transform " + transformUrn + " is currently not supported.");
    }
    applyStreamingTransform(transform, node, translator);
  }

  @Override
  public void visitValue(PValue value, TransformHierarchy.Node producer) {
    // do nothing here
  }

  /**
   * Points the translation context at {@code node} and delegates the actual translation to the
   * given translator. The unchecked casts are safe because the translator was looked up for this
   * very transform by {@link GaiaStreamingTransformTranslators#getTranslator}.
   */
  private <T extends PTransform<?, ?>> void applyStreamingTransform(
      PTransform<?, ?> transform,
      TransformHierarchy.Node node,
      StreamTransformTranslator<?> translator) {

    @SuppressWarnings("unchecked")
    T typedTransform = (T) transform;

    @SuppressWarnings("unchecked")
    StreamTransformTranslator<T> typedTranslator = (StreamTransformTranslator<T>) translator;

    // create the applied PTransform on the streamingContext
    streamingContext.setCurrentTransform(node.toAppliedPTransform(getPipeline()));
    typedTranslator.translateNode(typedTransform, streamingContext);
  }

  /**
   * Returns whether {@code translator} can translate {@code transform}. Sets the current
   * transform on the context first, since {@link StreamTransformTranslator#canTranslate} may
   * inspect it. Cast safety: see {@link #applyStreamingTransform}.
   */
  private <T extends PTransform<?, ?>> boolean applyCanTranslate(
      PTransform<?, ?> transform,
      TransformHierarchy.Node node,
      StreamTransformTranslator<?> translator) {

    @SuppressWarnings("unchecked")
    T typedTransform = (T) transform;

    @SuppressWarnings("unchecked")
    StreamTransformTranslator<T> typedTranslator = (StreamTransformTranslator<T>) translator;

    streamingContext.setCurrentTransform(node.toAppliedPTransform(getPipeline()));

    return typedTranslator.canTranslate(typedTransform, streamingContext);
  }

  /**
   * The interface that every Gaia translator of a Unified operator should implement. This interface
   * is for <b>streaming</b> jobs. For examples of such translators see {@link
   * GaiaStreamingTransformTranslators}.
   */
  abstract static class StreamTransformTranslator<T extends PTransform> {

    /** Translate the given transform. */
    abstract void translateNode(T transform, GaiaStreamingTranslationContext context);

    /** Returns true iff this translator can translate the given transform. */
    boolean canTranslate(T transform, GaiaStreamingTranslationContext context) {
      return true;
    }
  }

  /**
   * Replaces {@link WriteFiles} transforms that have no explicit shard count with one that writes
   * a fixed number of shards, to avoid producing one tiny file per bundle in streaming mode.
   */
  @VisibleForTesting
  static class StreamingShardedWriteFactory<UserT, DestinationT, OutputT>
      implements PTransformOverrideFactory<
          PCollection<UserT>,
          WriteFilesResult<DestinationT>,
          WriteFiles<UserT, DestinationT, OutputT>> {
    GaiaPipelineOptions options;

    StreamingShardedWriteFactory(PipelineOptions options) {
      this.options = options.as(GaiaPipelineOptions.class);
    }

    @Override
    public PTransformReplacement<PCollection<UserT>, WriteFilesResult<DestinationT>>
        getReplacementTransform(
            AppliedPTransform<
                    PCollection<UserT>,
                    WriteFilesResult<DestinationT>,
                    WriteFiles<UserT, DestinationT, OutputT>>
                transform) {
      // By default, if numShards is not set WriteFiles will produce one file per bundle. In
      // streaming, there are large numbers of small bundles, resulting in many tiny files.
      // Instead we pick parallelism * 2 to ensure full parallelism, but prevent too-many files.
      Integer jobParallelism = options.getParallelism();

      Preconditions.checkArgument(
          jobParallelism > 0,
          "Parallelism of a job should be greater than 0. Currently set: %s",
          jobParallelism);
      int numShards = jobParallelism * 2;

      try {
        // Rebuild an equivalent WriteFiles from the original's sink and side inputs, then pin
        // the shard count on it.
        List<PCollectionView<?>> sideInputs =
            WriteFilesTranslation.getDynamicDestinationSideInputs(transform);
        FileBasedSink sink = WriteFilesTranslation.getSink(transform);

        @SuppressWarnings("unchecked")
        WriteFiles<UserT, DestinationT, OutputT> replacement =
            WriteFiles.to(sink).withSideInputs(sideInputs);
        if (WriteFilesTranslation.isWindowedWrites(transform)) {
          replacement = replacement.withWindowedWrites();
        }
        return PTransformReplacement.of(
            PTransformReplacements.getSingletonMainInput(transform),
            replacement.withNumShards(numShards));
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }

    @Override
    public Map<PValue, ReplacementOutput> mapOutputs(
        Map<TupleTag<?>, PValue> outputs, WriteFilesResult<DestinationT> newOutput) {
      // No output mapping is required for this replacement.
      return Collections.emptyMap();
    }
  }
}