/*

 * Licensed to the Apache Software Foundation (ASF) under one

 * or more contributor license agreements.  See the NOTICE file

 * distributed with this work for additional information

 * regarding copyright ownership.  The ASF licenses this file

 * to you under the Apache License, Version 2.0 (the

 * "License"); you may not use this file except in compliance

 * with the License.  You may obtain a copy of the License at

 *

 *     http://www.apache.org/licenses/LICENSE-2.0

 *

 * Unless required by applicable law or agreed to in writing, software

 * distributed under the License is distributed on an "AS IS" BASIS,

 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

 * See the License for the specific language governing permissions and

 * limitations under the License.

 */

package com.bff.gaia.unified.sdk.extensions.sql.impl.rel;



import static com.bff.gaia.unified.vendor.guava.com.google.common.base.MoreObjects.firstNonNull;

import static com.bff.gaia.unified.vendor.guava.com.google.common.base.Preconditions.checkArgument;



import java.io.Serializable;

import java.math.BigDecimal;

import java.util.ArrayList;

import java.util.Comparator;

import java.util.List;

import com.bff.gaia.unified.sdk.coders.Coder;

import com.bff.gaia.unified.sdk.coders.KvCoder;

import com.bff.gaia.unified.sdk.coders.ListCoder;

import com.bff.gaia.unified.sdk.coders.StringUtf8Coder;

import com.bff.gaia.unified.sdk.coders.VarIntCoder;

import com.bff.gaia.unified.sdk.extensions.sql.impl.utils.CalciteUtils;

import com.bff.gaia.unified.sdk.state.StateSpec;

import com.bff.gaia.unified.sdk.state.StateSpecs;

import com.bff.gaia.unified.sdk.state.ValueState;

import com.bff.gaia.unified.sdk.transforms.DoFn;

import com.bff.gaia.unified.sdk.transforms.Flatten;

import com.bff.gaia.unified.sdk.transforms.PTransform;

import com.bff.gaia.unified.sdk.transforms.ParDo;

import com.bff.gaia.unified.sdk.transforms.SerializableFunctions;

import com.bff.gaia.unified.sdk.transforms.Top;

import com.bff.gaia.unified.sdk.transforms.WithKeys;

import com.bff.gaia.unified.sdk.transforms.windowing.GlobalWindows;

import com.bff.gaia.unified.sdk.transforms.windowing.Window;

import com.bff.gaia.unified.sdk.values.KV;

import com.bff.gaia.unified.sdk.values.PCollection;

import com.bff.gaia.unified.sdk.values.PCollectionList;

import com.bff.gaia.unified.sdk.values.Row;

import com.bff.gaia.unified.sdk.values.WindowingStrategy;

import org.apache.calcite.plan.RelOptCluster;

import org.apache.calcite.plan.RelTraitSet;

import org.apache.calcite.rel.RelCollation;

import org.apache.calcite.rel.RelCollationImpl;

import org.apache.calcite.rel.RelFieldCollation;

import org.apache.calcite.rel.RelNode;

import org.apache.calcite.rel.core.Sort;

import org.apache.calcite.rex.RexInputRef;

import org.apache.calcite.rex.RexLiteral;

import org.apache.calcite.rex.RexNode;

import org.apache.calcite.sql.type.SqlTypeName;

import com.bff.gaia.unified.sdk.schemas.Schema;



/**

 * {@code UnifiedRelNode} to replace a {@code Sort} node.

 *

 * <p>Since Unified does not fully support global sort, we are using {@link Top} to implement the

 * {@code Sort} algebra. The following types of ORDER BY are supported:

 *

 * <pre>{@code

 * select * from t order by id desc limit 10;

 * select * from t order by id desc limit 10 offset 5;

 * }</pre>

 *

 * <p>but ORDER BY without a LIMIT is NOT supported:

 *

 * <pre>{@code

 * select * from t order by id desc

 * }</pre>

 *

 * <h3>Constraints</h3>

 *

 * <ul>

 *   <li>Due to the constraints of {@link Top}, the result of a `ORDER BY LIMIT` must fit into the

 *       memory of a single machine.

 *   <li>Since `WINDOW`(HOP, TUMBLE, SESSION etc) is always associated with `GroupBy`, it does not

 *       make much sense to use `ORDER BY` with `WINDOW`.

 * </ul>

 */

public class UnifiedSortRel extends Sort implements UnifiedRelNode {

  /** Indices (into the input row) of the ORDER BY fields, in clause order. */
  private final List<Integer> fieldIndices = new ArrayList<>();

  /** Per ORDER BY field: {@code true} means ASCENDING, {@code false} DESCENDING. */
  private final List<Boolean> orientation = new ArrayList<>();

  /** Per ORDER BY field: {@code true} means NULLS FIRST, {@code false} NULLS LAST. */
  private final List<Boolean> nullsFirst = new ArrayList<>();

  /** Number of leading rows to skip (OFFSET); 0 when no OFFSET clause is present. */
  private int startIndex = 0;

  /** Maximum number of rows to emit (LIMIT / FETCH). */
  private int count;

  /**
   * Creates the sort node, extracting field indices, sort directions and null placement from the
   * collation, and OFFSET/LIMIT values from the {@code offset}/{@code fetch} literals.
   *
   * @param offset the OFFSET expression, or {@code null} when absent
   * @param fetch the LIMIT/FETCH expression; must not be {@code null} because an unbounded global
   *     sort is not supported
   * @throws UnsupportedOperationException if {@code fetch} is {@code null} (ORDER BY without LIMIT)
   */
  public UnifiedSortRel(
      RelOptCluster cluster,
      RelTraitSet traits,
      RelNode child,
      RelCollation collation,
      RexNode offset,
      RexNode fetch) {
    super(cluster, traits, child, collation, offset, fetch);

    List<RexNode> fieldExps = getChildExps();
    RelCollationImpl collationImpl = (RelCollationImpl) collation;
    List<RelFieldCollation> collations = collationImpl.getFieldCollations();
    for (int i = 0; i < fieldExps.size(); i++) {
      RexNode fieldExp = fieldExps.get(i);
      RexInputRef inputRef = (RexInputRef) fieldExp;
      fieldIndices.add(inputRef.getIndex());
      orientation.add(collations.get(i).getDirection() == RelFieldCollation.Direction.ASCENDING);

      // An UNSPECIFIED null direction falls back to the default for the sort direction.
      RelFieldCollation.NullDirection rawNullDirection = collations.get(i).nullDirection;
      if (rawNullDirection == RelFieldCollation.NullDirection.UNSPECIFIED) {
        rawNullDirection = collations.get(i).getDirection().defaultNullDirection();
      }
      nullsFirst.add(rawNullDirection == RelFieldCollation.NullDirection.FIRST);
    }

    if (fetch == null) {
      throw new UnsupportedOperationException("ORDER BY without a LIMIT is not supported!");
    }

    RexLiteral fetchLiteral = (RexLiteral) fetch;
    count = ((BigDecimal) fetchLiteral.getValue()).intValue();

    if (offset != null) {
      RexLiteral offsetLiteral = (RexLiteral) offset;
      startIndex = ((BigDecimal) offsetLiteral.getValue()).intValue();
    }
  }

  /** Returns {@code true} when this node represents a bare LIMIT (no ORDER BY fields). */
  public boolean isLimitOnly() {
    return fieldIndices.isEmpty();
  }

  /** Returns the LIMIT (maximum number of rows to emit). */
  public int getCount() {
    return count;
  }

  @Override
  public PTransform<PCollectionList<Row>, PCollection<Row>> buildPTransform() {
    return new Transform();
  }

  /** Expands this sort node into either a stateful LIMIT or a {@link Top}-based ORDER BY LIMIT. */
  private class Transform extends PTransform<PCollectionList<Row>, PCollection<Row>> {

    @Override
    public PCollection<Row> expand(PCollectionList<Row> pinput) {
      checkArgument(
          pinput.size() == 1,
          "Wrong number of inputs for %s: %s",
          UnifiedSortRel.class.getSimpleName(),
          pinput);
      PCollection<Row> upstream = pinput.get(0);

      // There is a need to separate ORDER BY LIMIT and LIMIT:
      //  - GroupByKey (used in Top) is not allowed on unbounded data in global window so ORDER BY
      // ... LIMIT
      //    works only on bounded data.
      //  - Just LIMIT operates on unbounded data, but across windows.
      if (fieldIndices.isEmpty()) {
        // TODO(https://issues.apache.org/jira/projects/BEAM/issues/BEAM-4702)
        // Figure out which operations are per-window and which are not.

        return upstream
            .apply(Window.into(new GlobalWindows()))
            .apply(new LimitTransform<>(startIndex))
            .setRowSchema(CalciteUtils.toSchema(getRowType()));
      } else {

        WindowingStrategy<?, ?> windowingStrategy = upstream.getWindowingStrategy();
        if (!(windowingStrategy.getWindowFn() instanceof GlobalWindows)) {
          throw new UnsupportedOperationException(
              String.format(
                  "`ORDER BY` is only supported for %s, actual windowing strategy: %s",
                  GlobalWindows.class.getSimpleName(), windowingStrategy));
        }

        // Top keeps the *largest* elements, so a reversed comparator yields the smallest-first
        // ordering that the SQL collation requires.
        ReversedUnifiedSqlRowComparator comparator =
            new ReversedUnifiedSqlRowComparator(fieldIndices, orientation, nullsFirst);

        // first find the top (offset + count)
        PCollection<List<Row>> rawStream =
            upstream
                .apply(
                    "extractTopOffsetAndFetch",
                    Top.of(startIndex + count, comparator).withoutDefaults())
                .setCoder(ListCoder.of(upstream.getCoder()));

        // strip the `leading offset`
        if (startIndex > 0) {
          rawStream =
              rawStream
                  .apply(
                      "stripLeadingOffset",
                      ParDo.of(new SubListFn<>(startIndex, startIndex + count)))
                  .setCoder(ListCoder.of(upstream.getCoder()));
        }

        return rawStream
            .apply("flatten", Flatten.iterables())
            .setSchema(
                CalciteUtils.toSchema(getRowType()),
                SerializableFunctions.identity(),
                SerializableFunctions.identity());
      }
    }
  }

  /**
   * Implements LIMIT (with optional OFFSET) via a stateful {@link DoFn}; elements are keyed to a
   * single dummy key so the state is effectively global.
   */
  private class LimitTransform<T> extends PTransform<PCollection<T>, PCollection<T>> {
    private final int startIndex;

    public LimitTransform(int startIndex) {
      this.startIndex = startIndex;
    }

    @Override
    public PCollection<T> expand(PCollection<T> input) {
      Coder<T> coder = input.getCoder();
      PCollection<KV<String, T>> keyedRow =
          input.apply(WithKeys.of("DummyKey")).setCoder(KvCoder.of(StringUtf8Coder.of(), coder));

      return keyedRow.apply(ParDo.of(new LimitFn<T>(getCount(), startIndex)));
    }
  }

  /** Skips the first {@code startIndex} elements, then emits at most {@code limitCount} more. */
  private static class LimitFn<T> extends DoFn<KV<String, T>, T> {
    private final Integer limitCount;
    private final Integer startIndex;

    public LimitFn(int c, int s) {
      limitCount = c;
      startIndex = s;
    }

    // Number of elements already emitted.
    @StateId("counter")
    private final StateSpec<ValueState<Integer>> counterState = StateSpecs.value(VarIntCoder.of());

    // Number of elements still to be skipped; initialized lazily to startIndex.
    @StateId("skipped_rows")
    private final StateSpec<ValueState<Integer>> skippedRowsState =
        StateSpecs.value(VarIntCoder.of());

    @ProcessElement
    public void processElement(
        ProcessContext context,
        @StateId("counter") ValueState<Integer> counterState,
        @StateId("skipped_rows") ValueState<Integer> skippedRowsState) {
      Integer toSkipRows = firstNonNull(skippedRowsState.read(), startIndex);
      if (toSkipRows == 0) {
        int current = firstNonNull(counterState.read(), 0);
        if (current < limitCount) {
          counterState.write(current + 1);
          context.output(context.element().getValue());
        }
      } else {
        skippedRowsState.write(toSkipRows - 1);
      }
    }
  }

  /**
   * Emits {@code element.subList(startIndex, endIndex)} with both bounds clamped to the element's
   * size: {@link Top} may emit fewer than (offset + limit) rows for a small input, in which case an
   * unclamped subList would throw {@link IndexOutOfBoundsException}. An OFFSET beyond the end of
   * the data correctly yields an empty list.
   */
  private static class SubListFn<T> extends DoFn<List<T>, List<T>> {
    private final int startIndex;
    private final int endIndex;

    public SubListFn(int startIndex, int endIndex) {
      this.startIndex = startIndex;
      this.endIndex = endIndex;
    }

    @ProcessElement
    public void processElement(ProcessContext ctx) {
      List<T> element = ctx.element();
      int from = Math.min(startIndex, element.size());
      int to = Math.min(endIndex, element.size());
      ctx.output(element.subList(from, to));
    }
  }

  @Override
  public Sort copy(
      RelTraitSet traitSet,
      RelNode newInput,
      RelCollation newCollation,
      RexNode offset,
      RexNode fetch) {
    return new UnifiedSortRel(getCluster(), traitSet, newInput, newCollation, offset, fetch);
  }

  /**
   * Compares two {@link Row}s field by field according to the ORDER BY spec, honoring per-field
   * direction and NULLS FIRST/LAST placement.
   */
  private static class UnifiedSqlRowComparator implements Comparator<Row>, Serializable {
    private final List<Integer> fieldsIndices;
    private final List<Boolean> orientation;
    private final List<Boolean> nullsFirst;

    public UnifiedSqlRowComparator(
        List<Integer> fieldsIndices, List<Boolean> orientation, List<Boolean> nullsFirst) {
      this.fieldsIndices = fieldsIndices;
      this.orientation = orientation;
      this.nullsFirst = nullsFirst;
    }

    @Override
    public int compare(Row row1, Row row2) {
      for (int i = 0; i < fieldsIndices.size(); i++) {
        int fieldIndex = fieldsIndices.get(i);
        int fieldRet = 0;

        Schema.FieldType fieldType = row1.getSchema().getField(fieldIndex).getType();
        SqlTypeName sqlTypeName = CalciteUtils.toSqlTypeName(fieldType);
        // whether NULL should be ordered first or last(compared to non-null values) depends on
        // what user specified in SQL(NULLS FIRST/NULLS LAST)
        boolean isValue1Null = (row1.getValue(fieldIndex) == null);
        boolean isValue2Null = (row2.getValue(fieldIndex) == null);
        if (isValue1Null && isValue2Null) {
          continue;
        } else if (isValue1Null && !isValue2Null) {
          fieldRet = -1 * (nullsFirst.get(i) ? -1 : 1);
        } else if (!isValue1Null && isValue2Null) {
          fieldRet = 1 * (nullsFirst.get(i) ? -1 : 1);
        } else {
          switch (sqlTypeName) {
            case TINYINT:
            case SMALLINT:
            case INTEGER:
            case BIGINT:
            case FLOAT:
            case DOUBLE:
            case VARCHAR:
            case DATE:
            case TIMESTAMP:
              Comparable v1 = (Comparable) row1.getValue(fieldIndex);
              Comparable v2 = (Comparable) row2.getValue(fieldIndex);
              fieldRet = v1.compareTo(v2);
              break;
            default:
              throw new UnsupportedOperationException(
                  "Data type: " + sqlTypeName + " not supported yet!");
          }
        }

        // Flip the result for DESCENDING fields.
        fieldRet *= (orientation.get(i) ? 1 : -1);

        if (fieldRet != 0) {
          return fieldRet;
        }
      }
      return 0;
    }
  }

  /**
   * Reverses {@link UnifiedSqlRowComparator} so that {@link Top} (which keeps the largest
   * elements) retains the rows that come first in the requested SQL ordering.
   */
  private static class ReversedUnifiedSqlRowComparator implements Comparator<Row>, Serializable {
    private final UnifiedSqlRowComparator comparator;

    public ReversedUnifiedSqlRowComparator(
        List<Integer> fieldsIndices, List<Boolean> orientation, List<Boolean> nullsFirst) {
      comparator = new UnifiedSqlRowComparator(fieldsIndices, orientation, nullsFirst);
    }

    @Override
    public int compare(Row row1, Row row2) {
      return comparator.compare(row2, row1);
    }
  }
}