import type {
  AggregationsAggregationContainer,
  QueryDslBoolQuery,
} from "@elastic/elasticsearch/lib/api/types";
import type { SearchRequest } from "@elastic/elasticsearch/lib/api/typesWithBodyKey";
import { TRPCError } from "@trpc/server";
import {
  getGroup,
  getMetric,
  percentileToPercent,
  type TimeseriesInputType,
} from "~/server/analytics/registry";
import { env } from "../../env.mjs";
import {
  currentVsPreviousDates,
  generateFilterConditions,
  generateTracesPivotQueryConditions,
} from "../api/routers/analytics/common";
import { prisma } from "../db";
import { esClient, TRACE_INDEX } from "../elasticsearch";
import {
  analyticsPipelines,
  type FlattenAnalyticsGroupsEnum,
  pipelineAggregationsToElasticSearch,
  type SeriesInputType,
} from "./registry";
import {
  type PercentileAggregationTypes,
  percentileAggregationTypes,
} from "./types";
import { filterOutEmptyFilters } from "./utils";

/**
 * Per-group resolvers that translate raw bucket keys returned by
 * Elasticsearch into human-readable labels. Only groups listed here get
 * their keys remapped; other groups display the raw key.
 */
const labelsMapping: Partial<
  Record<
    FlattenAnalyticsGroupsEnum,
    (projectId: string) => Promise<Record<string, string>>
  >
> = {
  // Topic ids -> topic names, resolved from the project's database.
  "topics.topics": async (projectId: string) => {
    const topics = await prisma.topic.findMany({
      where: { projectId },
      select: { id: true, name: true },
    });

    const namesById: Record<string, string> = {};
    for (const { id, name } of topics) {
      namesById[id] = name;
    }
    return namesById;
  },

  // Boolean evaluation outcome is indexed as 0/1.
  "evaluations.evaluation_passed": async () => ({
    0: "failed",
    1: "passed",
  }),
};

/**
 * Runs the analytics timeseries query against Elasticsearch: builds one
 * aggregation per requested series (wrapping the base metric aggregation
 * with optional pipeline, filter and percentage layers plus an optional
 * groupBy layer), queries the current and previous period in a single
 * request, and parses the response buckets into `{ date, ...values }` rows.
 *
 * Returns `{ previousPeriod, currentPeriod }`, each an array of rows
 * (a single row per period when `input.timeScale === "full"`).
 */
export const timeseries = async (input: TimeseriesInputType) => {
  if (env.IS_QUICKWIT) {
    // TODO: Remove this once Quickwit v0.9 is released as it supports cardinality
    input.series = input.series.map((series) => ({
      ...series,
      aggregation:
        series.aggregation === "cardinality" ? "terms" : series.aggregation,
    }));
  }

  const { previousPeriodStartDate, startDate, endDate } =
    currentVsPreviousDates(
      input,
      typeof input.timeScale === "number" ? input.timeScale : undefined,
    );

  // Calculate total time span in minutes
  const totalMinutes = (endDate.getTime() - startDate.getTime()) / (1000 * 60);

  // Adjust timeScale to avoid too many buckets (max 1000 buckets)
  let adjustedTimeScale = input.timeScale;
  if (typeof input.timeScale === "number") {
    const estimatedBuckets = totalMinutes / input.timeScale;
    if (estimatedBuckets > 1000) {
      // Round up to one full day at least
      adjustedTimeScale = 24 * 60;
    }
  }

  // Build one (or several, when wrappers are needed) named aggregation per
  // series. The aggregation names double as extraction paths when parsing
  // the response (see extractResult).
  let aggs = Object.fromEntries(
    input.series.flatMap(
      (
        {
          metric,
          aggregation,
          pipeline,
          key,
          subkey,
          filters,
          asPercent,
        }: SeriesInputType,
        index: number,
      ) => {
        const metric_ = getMetric(metric);

        // Validate key/subkey requirements up front so the user gets a
        // clear error instead of a malformed query.
        if (metric_.requiresKey && !metric_.requiresKey.optional && !key) {
          throw new TRPCError({
            code: "BAD_REQUEST",
            message: `Metric ${metric} requires a key to be defined`,
          });
        }
        if (metric_.requiresSubkey && !subkey) {
          throw new TRPCError({
            code: "BAD_REQUEST",
            message: `Metric ${metric} requires a subkey to be defined`,
          });
        }

        const metricAggregations = metric_.aggregation(
          index,
          aggregation,
          key,
          subkey,
        );

        let aggregationQuery: Record<string, AggregationsAggregationContainer> =
          metricAggregations;
        let pipelinePath_: string | undefined;
        let pipelineBucketsMetricPath: string | undefined;
        // Pipeline series first bucket by the pipeline field (terms agg),
        // compute the metric per bucket, then reduce across buckets with a
        // pipeline aggregation (avg/sum/min/max) via buckets_path.
        if (pipeline) {
          // Fix needed for OpenSearch, it doesn't support dots in field names when referenced from buckets_path
          const metricWithoutDots = metric.replace(/\./g, "__");
          const pipelineBucketsPath = `${index}__${metricWithoutDots}__${aggregation}__${pipeline.field}`;
          const metricPath = metric_
            .extractionPath(index, aggregation, key, subkey)
            // Fix for working with percentiles too
            .split(">values")[0]
            ?.replace(/\./g, "__");
          pipelinePath_ = esSafePipelinePath(
            index,
            metric,
            aggregation,
            pipeline,
          );

          // For percentile aggregations, buckets_path must address the
          // specific percentile value (e.g. ".99") inside the result.
          pipelineBucketsMetricPath =
            `${pipelineBucketsPath}>${metricPath}` +
            (percentileAggregationTypes.includes(aggregation as any)
              ? `.${
                  percentileToPercent[aggregation as PercentileAggregationTypes]
                }`
              : "");

          aggregationQuery = {
            [pipelineBucketsPath]: {
              terms: {
                field: analyticsPipelines[pipeline.field].field,
                size: 10000,
              },
              aggs: aggregationQuery,
            },
            [pipelinePath_]: {
              [pipelineAggregationsToElasticSearch[pipeline.aggregation]]: {
                buckets_path: pipelineBucketsMetricPath,
                gap_policy: "insert_zeros",
              },
            },
          };
        }

        // Series-level filters wrap the aggregation in a filter bucket named
        // "<path>__filters"; extractResult descends through it when parsing.
        if (Object.keys(filterOutEmptyFilters(filters)).length > 0) {
          const filtersWrapperKey = pipelinePath_
            ? `${pipelinePath_}__filters`
            : `${Object.keys(aggregationQuery)[0]}__filters`;

          const previousAggregationQuery = { ...aggregationQuery };
          aggregationQuery = {
            [filtersWrapperKey]: {
              filter: {
                bool: {
                  must: generateFilterConditions(filters ?? {}),
                } as QueryDslBoolQuery,
              },
              aggs: aggregationQuery,
            },
          } as Record<string, AggregationsAggregationContainer>;

          // asPercent keeps both the filtered and the unfiltered aggregation
          // and adds a bucket_script computing filtered/all * 100.
          if (asPercent) {
            const extractionPath = getMetric(metric).extractionPath(
              index,
              aggregation,
              key,
              subkey,
            );
            const percentageWrapperKey = pipelinePath_
              ? `${pipelinePath_}__percent`
              : `${Object.keys(aggregationQuery)[0]}__percent`;

            aggregationQuery = {
              ...aggregationQuery,
              ...previousAggregationQuery,
              [percentageWrapperKey]: {
                bucket_script: {
                  buckets_path: {
                    filtered: `${filtersWrapperKey}>${
                      pipelinePath_ ?? extractionPath
                    }`,
                    all: pipelinePath_ ?? extractionPath,
                  },
                  script: {
                    source:
                      "params.all > 0 ? (params.filtered / params.all) * 100 : 0",
                  },
                },
              },
            };
          }
        }

        return Object.entries(aggregationQuery);
      },
    ),
  );

  // When grouping, wrap all series aggregations inside the group's own
  // bucketing aggregation and pre-load key -> label mappings if available.
  let groupLabelsMapping: Record<string, string> | undefined;
  if (input.groupBy) {
    const group = getGroup(input.groupBy);
    aggs = group.aggregation(aggs);
    if (labelsMapping[input.groupBy]) {
      groupLabelsMapping = await labelsMapping[input.groupBy]?.(
        input.projectId,
      );
    }
  }

  // Base query covers the whole window from previous period start to now.
  const { pivotIndexConditions } = generateTracesPivotQueryConditions({
    ...input,
    startDate: previousPeriodStartDate.getTime(),
  });

  const tracesPerDayAggs = {
    date_histogram: {
      field: "timestamps.started_at",
      fixed_interval: adjustedTimeScale ? `${adjustedTimeScale}m` : "1d",
      min_doc_count: 0,
      time_zone: input.timeZone,
    },
    aggs,
  };

  // A single request answers both periods via a range aggregation with
  // "previous" and "current" buckets. Quickwit takes epoch timestamps
  // (ms * 1e6, i.e. nanoseconds) where Elasticsearch takes ISO strings.
  const queryBody: SearchRequest["body"] = {
    size: 0,
    query: pivotIndexConditions,
    aggs: {
      previous_vs_current: {
        range: {
          field: "timestamps.started_at",
          ranges: [
            {
              key: "previous",
              from: env.IS_QUICKWIT
                ? previousPeriodStartDate.getTime() * 1000 * 1000
                : previousPeriodStartDate.toISOString(),
              to: env.IS_QUICKWIT
                ? startDate.getTime() * 1000 * 1000
                : startDate.toISOString(),
            },
            {
              key: "current",
              from: env.IS_QUICKWIT
                ? startDate.getTime() * 1000 * 1000
                : startDate.toISOString(),
              to: env.IS_QUICKWIT
                ? endDate.getTime() * 1000 * 1000
                : endDate.toISOString(),
            },
          ],
        },
        aggs:
          input.timeScale === "full"
            ? aggs
            : {
                traces_per_day: tracesPerDayAggs,
              },
      },
    } as any,
  };

  const client = await esClient({ projectId: input.projectId });
  // NOTE(review): the index is picked from input.startDate while the query
  // window starts at previousPeriodStartDate — confirm TRACE_INDEX.for also
  // covers the previous period.
  const result = (await client.search({
    index: TRACE_INDEX.for(input.startDate),
    body: queryBody,
  })) as any;

  // Turns a list of date buckets into rows of { date, ...series values },
  // optionally nesting values per group when groupBy is set.
  const parseAggregations = (
    buckets: any,
  ): ({ date: string } & Record<string, number>)[] => {
    return buckets.map((day_bucket: any) => {
      let aggregationResult: Record<string, any> = {
        date: day_bucket.key_as_string ?? day_bucket.from_as_string,
      };

      if (input.groupBy) {
        const group = getGroup(input.groupBy);
        const extractionPath = group.extractionPath();
        let buckets = day_bucket;
        // Walk the part of the path before ">buckets" to reach the group
        // aggregation; the remainder is resolved per group bucket.
        const [pathsBeforeBuckets, pathsAfterBuckets] =
          extractionPath.split(">buckets");
        for (const path of pathsBeforeBuckets!.split(">")) {
          buckets = buckets[path];
        }
        buckets = buckets.buckets;

        if (!buckets) {
          throw `Could not find buckets for ${input.groupBy} groupBy at ${extractionPath}`;
        }

        // Group buckets may be an array (terms-style) or a keyed object
        // (filters-style); both map to { label: seriesValues }.
        const groupResult = Object.fromEntries(
          (Array.isArray(buckets)
            ? buckets.map((group_bucket: any) => {
                return [
                  groupLabelsMapping
                    ? groupLabelsMapping[group_bucket.key]
                    : group_bucket.key,
                  extractResultForBucket(
                    input.series,
                    pathsAfterBuckets,
                    group_bucket,
                  ),
                ];
              })
            : Object.entries(buckets).map(
                ([key, group_bucket]: [string, any]) => {
                  return [
                    groupLabelsMapping ? groupLabelsMapping[key] : key,
                    extractResultForBucket(
                      input.series,
                      pathsAfterBuckets,
                      group_bucket,
                    ),
                  ];
                },
              )
          ).filter(([key, _]) => key !== undefined),
        );

        aggregationResult = {
          ...aggregationResult,
          [input.groupBy]: groupResult,
        };
      } else {
        aggregationResult = {
          ...aggregationResult,
          ...extractResultForBucket(input.series, undefined, day_bucket),
        };
      }

      return aggregationResult;
    });
  };

  // "full" time scale: one bucket per period, no date histogram.
  // NOTE(review): destructuring assumes the range agg returns buckets in
  // declaration order (previous first) — the code below uses find() instead.
  if (input.timeScale === "full") {
    const [previous, current] =
      result.aggregations?.previous_vs_current.buckets.filter(
        (bucket: any) => bucket.key === "previous" || bucket.key === "current",
      );

    return {
      previousPeriod: parseAggregations([previous]),
      currentPeriod: parseAggregations([current]),
    };
  }

  const currentPeriod = parseAggregations(
    result.aggregations.previous_vs_current.buckets.find(
      (bucket: any) => bucket.key === "current",
    ).traces_per_day.buckets,
  );

  let previousPeriod = parseAggregations(
    result.aggregations.previous_vs_current.buckets.find(
      (bucket: any) => bucket.key === "previous",
    ).traces_per_day.buckets,
  );
  // Correction for when a single day is selected and we end up querying for 2 days for previous period and dates don't align
  previousPeriod = previousPeriod.slice(
    Math.max(0, previousPeriod.length - currentPeriod.length),
  );

  return {
    previousPeriod: previousPeriod,
    currentPeriod: currentPeriod,
  };
};

/**
 * Collects the values of every series out of a single aggregation bucket,
 * merging the per-series `{ seriesName: value }` records into one object.
 */
const extractResultForBucket = (
  seriesList: SeriesInputType[],
  pathsAfterBuckets: string | undefined,
  bucket: any,
) => {
  const merged: Record<string, any> = {};
  seriesList.forEach((series, index) => {
    Object.assign(
      merged,
      extractResult(series, index, pathsAfterBuckets, bucket),
    );
  });
  return merged;
};

/**
 * Extracts a single series value out of an Elasticsearch aggregation
 * bucket, undoing the wrapper layers (pipeline, filters, percent) that
 * the query builder in `timeseries` added around the metric aggregation.
 *
 * Returns a one-entry record keyed by the series name
 * (`index/metric/aggregation[/key]`), falling back to 0 when the
 * expected aggregation is missing from the bucket.
 */
const extractResult = (
  {
    metric,
    aggregation,
    pipeline,
    key,
    subkey,
    filters,
    asPercent,
  }: SeriesInputType,
  index: number,
  pathsAfterBuckets: string | undefined,
  result: any,
) => {
  // When grouping, first descend into the group bucket.
  let current = result;
  if (pathsAfterBuckets) {
    for (const path of pathsAfterBuckets.split(">")) {
      if (path) {
        current = current[path];
      }
    }
  }

  const metric_ = getMetric(metric);

  // Series name reported to the caller. "terms" is only used internally as
  // the Quickwit stand-in for "cardinality" (see timeseries()), so map it
  // back. Computed once here — previously duplicated in two places below.
  const hasKeySupport = metric_.requiresKey !== undefined;
  const seriesName =
    key && hasKeySupport
      ? `${index}/${metric}/${aggregation.replace(
          "terms",
          "cardinality",
        )}/${key}`
      : `${index}/${metric}/${aggregation.replace("terms", "cardinality")}`;

  const extractionPath = metric_.extractionPath(
    index,
    aggregation,
    key,
    subkey,
  );
  let paths = extractionPath.split(">");

  if (pipeline) {
    const pipelinePath_ = pipelinePath(index, metric, aggregation, pipeline);
    const esSafePipelinePath_ = esSafePipelinePath(
      index,
      metric,
      aggregation,
      pipeline,
    );
    // Percent series read the bucket_script output directly.
    if (asPercent) {
      return {
        [pipelinePath_]:
          current?.[`${esSafePipelinePath_}__percent`]?.value ?? 0,
      };
    }
    if (Object.keys(filterOutEmptyFilters(filters)).length > 0) {
      // With filters the pipeline aggregation sits inside a
      // "<path>__filters" wrapper; fall back to the top level otherwise.
      const container = current?.[`${esSafePipelinePath_}__filters`] ?? current;
      const value = container?.[esSafePipelinePath_]?.value ?? 0;
      return { [pipelinePath_]: value };
    }
    return { [pipelinePath_]: current?.[esSafePipelinePath_]?.value ?? 0 };
  }

  if (Object.keys(filterOutEmptyFilters(filters)).length > 0) {
    // Non-pipeline filtered series are nested one level deeper under the
    // "<firstPath>__filters" wrapper added by the query builder.
    const firstPath = paths[0];
    paths.unshift(`${firstPath}__filters`);

    if (asPercent) {
      // The percentage bucket_script sits at the top level and replaces
      // the whole extraction path.
      paths = [`${firstPath}__percent`];
    }
  }

  for (const path of paths) {
    if (!current?.[path]) {
      // Missing aggregation (e.g. empty bucket): report 0 for the series.
      return { [seriesName]: 0 };
    }
    current = current[path];
  }

  let value = current && typeof current === "object" ? current.value : current;
  if (aggregation === "terms" && typeof current === "object") {
    // "terms" stands in for cardinality: count the distinct buckets.
    // NOTE(review): trace_id reads sum_other_doc_count instead — presumably
    // because its cardinality exceeds the terms bucket size; confirm intent.
    if (metric === "metadata.trace_id") {
      value = current.sum_other_doc_count;
    } else {
      value = current.buckets.length;
    }
  }

  return {
    [seriesName]: value,
  };
};

/**
 * Human-readable identifier for a pipeline-aggregated series, e.g.
 * "0/performance.total_time/avg/trace_id/avg".
 */
const pipelinePath = (
  index: number,
  metric: SeriesInputType["metric"],
  aggregation: SeriesInputType["aggregation"],
  pipeline: Required<SeriesInputType>["pipeline"],
) =>
  [index, metric, aggregation, pipeline.field, pipeline.aggregation].join("/");

/**
 * Same as `pipelinePath` but usable as an Elasticsearch aggregation name:
 * dots in the path are replaced with "__" (dots are path separators in
 * buckets_path references).
 */
const esSafePipelinePath = (
  index: number,
  metric: SeriesInputType["metric"],
  aggregation: SeriesInputType["aggregation"],
  pipeline: Required<SeriesInputType>["pipeline"],
) => {
  const readablePath = pipelinePath(index, metric, aggregation, pipeline);
  return readablePath.replace(/\./g, "__");
};
