package com.slimbloody.controller;

import com.google.protobuf.InvalidProtocolBufferException;
import com.slimbloody.configs.UserTcmQualityVersionConfig;
import com.slimbloody.dto.GetParam;
import com.slimbloody.dto.WriteParam;
import com.slimbloody.hbase.HBaseRowKeyEnum;
import com.slimbloody.hbase.HBaseTableInfoEnum;
import com.slimbloody.service.PBService;
import com.ysb.hbase.dml.HbaseDmlService;
import com.ysb.hbase.model.HbaseGetParam;
import com.ysb.hbase.model.HbasePutParam;
import com.ysb.hbase.model.ValueModel;
import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.util.StopWatch;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.io.IOException;
import java.util.Arrays;
import java.util.Objects;

@Slf4j
@RestController
@RequestMapping("/hbase")
public class HBaseController {
  // NOTE(review): all dependencies use field injection; Spring recommends constructor
  // injection for required dependencies (fail-fast wiring, easier testing) — consider migrating.

  // Builds protobuf feature payloads and records (de)serialization cost.
  @Autowired
  private PBService pBService;
  // NOTE(review): field name should be pbService (treat acronyms as words), but @Autowired may
  // fall back to by-name resolution if multiple PBService beans exist — confirm before renaming.

  // Thin DML facade over the HBase client (putData / getValue).
  @Autowired
  private HbaseDmlService hbaseDmlService;

  // NOTE(review): appears unused in this file — verify there is no reflective or
  // future use before removing the field.
  @Autowired
  private ApplicationContext applicationContext;

  // Supplies the current version of the user TCM-quality feature table.
  @Autowired
  private UserTcmQualityVersionConfig userTcmQualityVersionConfig;

  // Fixed row key shared by the (currently disabled) write/read test endpoints.
  // NOTE(review): constants should be UPPER_SNAKE_CASE (e.g. ROW_KEY) per Java convention;
  // renaming changes the public API, so coordinate with any external readers first.
  public static final String key = "uid+hbase";

  // NOTE(review): publicly mutable and unused in this file — confirm no external
  // readers, then make it private/final or delete it.
  public int batchSize = 300;



  /*
  NOTE(review): dead code below — it references TcmQualityFeature, which is not
  imported in this file, so it will not compile if simply uncommented. Prefer
  deleting this block (version control preserves the history) or restoring it
  together with the missing import.

  @PostMapping("/buildAvg")
  public void buildAvg() {
    for (int i = 0; i < 800; ++i) {
      TcmQualityFeature.TcmItemAvgPrice.Builder itemBuilder = TcmQualityFeature.TcmItemAvgPrice.newBuilder();
      itemBuilder.setWeight(i);
      TcmQualityFeature.TcmItemAvgPrice avgPrice = itemBuilder.build();

      String key = String.format("%03d", i);
      TcmQualityFeature.TcmAvgPrice.Builder builder = TcmQualityFeature.TcmAvgPrice.newBuilder();
      builder.addItems(avgPrice);
      TcmQualityFeature.TcmAvgPrice tcmAvgPrice = builder.build();

      boolean success = hbaseDmlService.putData(
        new HbasePutParam(
          HBaseTableInfoEnum.TcmAvgPrice.getTableNameWithVersion(1),
          HBaseTableInfoEnum.TcmAvgPrice.getCf(),
          key,
          Arrays.asList(new ValueModel(HBaseTableInfoEnum.TcmAvgPrice.getQualifier(), tcmAvgPrice.toByteArray()))
        )
      );
      if (!success) {
        log.error("{} hbase write failed", key);
      }
    }
  }

  @PostMapping("scanTest")
  public void scanTest() throws IOException {
    StopWatch stopWatch = new StopWatch();
    stopWatch.start();
    Table table = hbaseDmlService.getConnection().getTable(TableName.valueOf(HBaseTableInfoEnum.TcmAvgPrice.getTableNameWithVersion(1)));
    byte[] cfBytes = HBaseTableInfoEnum.TcmAvgPrice.getCf().getBytes();
    byte[] qualifierBytes = HBaseTableInfoEnum.TcmAvgPrice.getQualifier().getBytes();

    String startRow = ""; // an empty start row means the scan begins at the head of the table
    boolean hasMore = true;
    int cacheSize = 50;

    while (hasMore) {
      // Create the Scan and set its start row.
      Scan scan = new Scan();
      scan.addColumn(cfBytes, qualifierBytes);
      scan.withStartRow(Bytes.toBytes(startRow)); // resume from where the previous batch ended
      scan.setCaching(cacheSize); // number of rows fetched from the server per RPC
      // scan.setBatch(1); // number of columns read per row
      // scan 'my_table', {CACHING => 50, LIMIT => 100}
      // caching => 50: fetch 50 rows per round trip from the server.
      // limit => 100: the client ultimately returns only the first 100 rows.

      // Scan the table.
      try (ResultScanner scanner = table.getScanner(scan)) {
        int count = 0;
        for (Result result : scanner) {
          count++;

          // Read the value (example: column family cf, qualifier q1).
          byte[] value = result.getValue(cfBytes, qualifierBytes);
          TcmQualityFeature.TcmAvgPrice.parseFrom(value);

          // Update the start row for the next scan.
          // Get the row key.
          String rowKey = Bytes.toString(result.getRow());
          System.out.println("rowKey: " + rowKey);
          startRow = rowKey;
        }

        // If this batch returned fewer than cacheSize rows, the table has been fully traversed.
        if (count < cacheSize) {
          hasMore = false;
        } else {
          // Avoid re-reading the last row of this batch; start the next batch at the following row.
          startRow = incrementRowKey(startRow);
        }
      }
    }

    stopWatch.stop();
    log.info("taskInfo: {}", stopWatch.getLastTaskInfo());
  }

  // Increment bytes from last to first; stop as soon as there is no carry.
  private String incrementRowKey(String rowKey) {
    byte[] rowKeyBytes = Bytes.toBytes(rowKey);
    for (int i = rowKeyBytes.length - 1; i >= 0; i--) {
      rowKeyBytes[i]++;
      if (rowKeyBytes[i] != 0) {
        break;
      }
    }
    return Bytes.toString(rowKeyBytes);
  }

  @PostMapping("configTest")
  public void configTest() {
    userTcmQualityVersionConfig.getCurrentVersion();
    System.out.println();
  }

  @PostMapping("/pb")
  public void pb(@RequestBody Integer size) throws InvalidProtocolBufferException {
    TcmQualityFeature.UserTcmQualityFeature userTcmQualityFeature = pBService.buildAvgList(size);
    pBService.recordCost(userTcmQualityFeature, size);
  }

  @PostMapping("/writeToHBase")
  public void writeToHBase(@RequestBody WriteParam param) throws InvalidProtocolBufferException {
    TcmQualityFeature.UserTcmQualityFeature UserTcmQualityFeature = null;
    if (Objects.equals(param.getType(), "avg")) {
      UserTcmQualityFeature = pBService.buildAvgList(param.getSize());
    } else if (Objects.equals(param.getType(), "max")) {
      UserTcmQualityFeature = pBService.buildMaxList(param.getSize());
    } else {
      throw new RuntimeException("type error");
    }

    byte[] byteArray = UserTcmQualityFeature.toByteArray();
    boolean success = hbaseDmlService.putData(
      new HbasePutParam(
        HBaseRowKeyEnum.UserTcmQualityFeature.getTable(),
        HBaseRowKeyEnum.UserTcmQualityFeature.getCf(),
        key,
        Arrays.asList(new ValueModel(HBaseRowKeyEnum.UserTcmQualityFeature.getQualifier(), byteArray))
      )
    );
    log.info("{}", success);
  }

  @PostMapping("/readFromHBase")
  public void readFromHBase(@RequestBody String key) throws InvalidProtocolBufferException {
    ValueModel value = hbaseDmlService.getValue(
      new HbaseGetParam(
        HBaseRowKeyEnum.UserTcmQualityFeature.getTable(),
        HBaseRowKeyEnum.UserTcmQualityFeature.getCf(),
        this.key,
        HBaseRowKeyEnum.UserTcmQualityFeature.getQualifier()
      )
    );
    StopWatch stopWatch = new StopWatch();
    stopWatch.start("get");
    TcmQualityFeature.UserTcmQualityFeature priceFeatureList = TcmQualityFeature.UserTcmQualityFeature.parseFrom(value.getValue());
    stopWatch.stop();
    log.info("{}", priceFeatureList);
    log.info(
      "cost: {}ns, size: {}B, len: {}",
      stopWatch.getLastTaskInfo().getTimeNanos(),
      priceFeatureList.getSerializedSize(),
      priceFeatureList.getItemList().size()
    );
  }

  @PostMapping("/readNonexistentTable")
  public void readNonexistentTable(@RequestBody GetParam getParam) throws InvalidProtocolBufferException {
    // org.apache.hadoop.hbase.TableNotFoundException
    ValueModel value = null;
    try {
      value = hbaseDmlService.getValue(
        new HbaseGetParam(
          getParam.getTable(),
          getParam.getCf(),
          getParam.getKey(),
          getParam.getQualifier()
        )
      );
    } catch (Exception e) {

    }
    System.out.println("1234");
  }

   */
}
