
if [ -z "$1" ] ; then
    type="cpa"
else
    type=$1
fi
echo "开始删除db_cfx.t_data_user_${type}_aggregates_userid表..."
# IF EXISTS: without it the drop errors out on the first run, when the
# table has never been created.
clickhouse-client --password Keepgo123@cys -q "drop table if exists db_cfx.t_data_user_${type}_aggregates_userid"
# Per-user daily aggregate. unique_oaids_state holds a uniqCombined partial
# state; readers merge it (uniqCombinedMerge) to get distinct oaid counts.
# TTL: move to cold storage after 7 days, delete after 31.
clickhouse-client --password Keepgo123@cys -q "
CREATE TABLE db_cfx.t_data_user_${type}_aggregates_userid (
    date Date,
    user_id String,
    unique_oaids_state AggregateFunction(uniqCombined, String)
) ENGINE = AggregatingMergeTree()
PARTITION BY date
ORDER BY (date,user_id)
TTL date + toIntervalDay(7) TO VOLUME 'cold_volume', date + toIntervalDay(31)
SETTINGS index_granularity = 8192, merge_with_ttl_timeout = 86400, storage_policy = 'hot_cold_policy'
"
echo "新建db_cfx.t_data_user_${type}_aggregates_userid表完成..."
# Backfill one day at a time, from today (dayNum=0) back to 30 days ago.
for dayNum in {0..30}; do
  echo "开始插入 dayNum = $dayNum 天前的数据..."

  # Check the client's exit status directly in the `if` instead of testing
  # $? afterwards (robust against statements being inserted in between).
  if clickhouse-client --password Keepgo123@cys -q "
    INSERT INTO db_cfx.t_data_user_${type}_aggregates_userid
    SELECT
        activate_date as date,
        user_id as user_id,
        uniqCombinedState(oaid) as unique_oaids_state
    FROM db_cfx.t_data_user_${type}_active_final
    WHERE activate_date = today() - INTERVAL $dayNum DAY  -- 计算 today() 减去 xxx 天
    GROUP BY date, user_id;
  "; then
    echo "dayNum = $dayNum 天前的数据插入成功"
  else
    # Diagnostics go to stderr; the loop deliberately continues on failure.
    echo "Error: dayNum = $dayNum 天前的数据插入失败" >&2
    # Uncomment to abort on the first failed day:
    # exit 1
  fi
  sleep 3  # throttle so consecutive inserts don't pile up on the server

  echo "------------------------"
done

echo "db_cfx.t_data_user_${type}_aggregates_userid表所有数据插入完成..."

sleep 10


echo "开始删除db_cfx.t_data_user_${type}_aggregates_channel表..."
# IF EXISTS keeps the drop from failing when the table is absent (first run).
clickhouse-client --password Keepgo123@cys -q "drop table if exists db_cfx.t_data_user_${type}_aggregates_channel"
# Per-channel daily aggregate; unique_oaids_state is a uniqCombined partial
# state merged at query time. TTL: cold volume after 7 days, drop after 31.
clickhouse-client --password Keepgo123@cys -q "
CREATE TABLE db_cfx.t_data_user_${type}_aggregates_channel (
    date Date,
    channel String,
    unique_oaids_state AggregateFunction(uniqCombined, String)
) ENGINE = AggregatingMergeTree()
PARTITION BY date
ORDER BY (date,channel)
TTL date + toIntervalDay(7) TO VOLUME 'cold_volume', date + toIntervalDay(31)
SETTINGS index_granularity = 8192, merge_with_ttl_timeout = 86400, storage_policy = 'hot_cold_policy'
"
echo "新建db_cfx.t_data_user_${type}_aggregates_channel表完成..."
# Backfill one day at a time, from today (dayNum=0) back to 30 days ago.
for dayNum in {0..30}; do
  echo "开始插入 dayNum = $dayNum 天前的数据..."

  # Test the client's exit status directly rather than via a separate $? check.
  if clickhouse-client --password Keepgo123@cys -q "
    INSERT INTO db_cfx.t_data_user_${type}_aggregates_channel
    SELECT
        activate_date as date,
        channel as channel,
        uniqCombinedState(oaid) as unique_oaids_state
    FROM db_cfx.t_data_user_${type}_active_final
    WHERE activate_date = today() - INTERVAL $dayNum DAY  -- 计算 today() 减去 xxx 天
    GROUP BY date, channel;
  "; then
    echo "dayNum = $dayNum 天前的数据插入成功"
  else
    # Diagnostics to stderr; the loop deliberately keeps going on failure.
    echo "Error: dayNum = $dayNum 天前的数据插入失败" >&2
    # Uncomment to abort on the first failed day:
    # exit 1
  fi

  sleep 3  # throttle between per-day inserts

  echo "------------------------"
done

echo "db_cfx.t_data_user_${type}_aggregates_channel表所有数据插入完成..."


sleep 10


echo "开始删除db_cfx.t_data_user_${type}_aggregates_brand表..."
# IF EXISTS keeps the drop from failing when the table is absent (first run).
clickhouse-client --password Keepgo123@cys -q "drop table if exists db_cfx.t_data_user_${type}_aggregates_brand"
# Daily aggregate keyed by (channel, dev_brand); unique_oaids_state is a
# uniqCombined partial state. TTL: cold volume after 7 days, drop after 31.
clickhouse-client --password Keepgo123@cys -q "
CREATE TABLE db_cfx.t_data_user_${type}_aggregates_brand (
    date Date,
    channel String,
    dev_brand String,
    unique_oaids_state AggregateFunction(uniqCombined, String)
) ENGINE = AggregatingMergeTree()
PARTITION BY date
ORDER BY (date,channel,dev_brand)
TTL date + toIntervalDay(7) TO VOLUME 'cold_volume', date + toIntervalDay(31)
SETTINGS index_granularity = 8192, merge_with_ttl_timeout = 86400, storage_policy = 'hot_cold_policy'
"
echo "新建db_cfx.t_data_user_${type}_aggregates_brand表完成..."
# Backfill one day at a time, from today (dayNum=0) back to 30 days ago.
for dayNum in {0..30}; do
  echo "开始插入 dayNum = $dayNum 天前的数据..."

  # Test the client's exit status directly rather than via a separate $? check.
  if clickhouse-client --password Keepgo123@cys -q "
    INSERT INTO db_cfx.t_data_user_${type}_aggregates_brand
    SELECT
        activate_date as date,
        channel as channel,
        dev_brand as dev_brand,
        uniqCombinedState(oaid) as unique_oaids_state
    FROM db_cfx.t_data_user_${type}_active_final
    WHERE activate_date = today() - INTERVAL $dayNum DAY  -- 计算 today() 减去 xxx 天
    GROUP BY date, channel, dev_brand;
  "; then
    echo "dayNum = $dayNum 天前的数据插入成功"
  else
    # Diagnostics to stderr; the loop deliberately keeps going on failure.
    echo "Error: dayNum = $dayNum 天前的数据插入失败" >&2
    # Uncomment to abort on the first failed day:
    # exit 1
  fi

  sleep 3  # throttle between per-day inserts

  echo "------------------------"
done

echo "db_cfx.t_data_user_${type}_aggregates_brand表所有数据插入完成..."


sleep 10


echo "开始删除db_cfx.t_data_user_${type}_aggregates_area表..."
# IF EXISTS keeps the drop from failing when the table is absent (first run).
clickhouse-client --password Keepgo123@cys -q "drop table if exists db_cfx.t_data_user_${type}_aggregates_area"
# Daily aggregate keyed by (channel, activate_area); unique_oaids_state is a
# uniqCombined partial state. TTL: cold volume after 7 days, drop after 31.
clickhouse-client --password Keepgo123@cys -q "
CREATE TABLE db_cfx.t_data_user_${type}_aggregates_area (
    date Date,
    channel String,
    activate_area String,
    unique_oaids_state AggregateFunction(uniqCombined, String)
) ENGINE = AggregatingMergeTree()
PARTITION BY date
ORDER BY (date,channel,activate_area)
TTL date + toIntervalDay(7) TO VOLUME 'cold_volume', date + toIntervalDay(31)
SETTINGS index_granularity = 8192, merge_with_ttl_timeout = 86400, storage_policy = 'hot_cold_policy'
"
echo "新建db_cfx.t_data_user_${type}_aggregates_area表完成..."
# Backfill one day at a time, from today (dayNum=0) back to 30 days ago.
for dayNum in {0..30}; do
  echo "开始插入 dayNum = $dayNum 天前的数据..."

  # Test the client's exit status directly rather than via a separate $? check.
  if clickhouse-client --password Keepgo123@cys -q "
    INSERT INTO db_cfx.t_data_user_${type}_aggregates_area
    SELECT
        activate_date as date,
        channel as channel,
        activate_area as activate_area,
        uniqCombinedState(oaid) as unique_oaids_state
    FROM db_cfx.t_data_user_${type}_active_final
    WHERE activate_date = today() - INTERVAL $dayNum DAY  -- 计算 today() 减去 xxx 天
    GROUP BY date, channel, activate_area;
  "; then
    echo "dayNum = $dayNum 天前的数据插入成功"
  else
    # Diagnostics to stderr; the loop deliberately keeps going on failure.
    echo "Error: dayNum = $dayNum 天前的数据插入失败" >&2
    # Uncomment to abort on the first failed day:
    # exit 1
  fi

  sleep 3  # throttle between per-day inserts

  echo "------------------------"
done

echo "db_cfx.t_data_user_${type}_aggregates_area表所有数据插入完成..."


sleep 10


echo "开始删除db_cfx.t_data_user_${type}_aggregates_model表..."
# IF EXISTS keeps the drop from failing when the table is absent (first run).
clickhouse-client --password Keepgo123@cys -q "drop table if exists db_cfx.t_data_user_${type}_aggregates_model"
# Daily aggregate keyed by (channel, dev_brand, dev_model); unique_oaids_state
# is a uniqCombined partial state. TTL: cold volume after 7 days, drop after 31.
clickhouse-client --password Keepgo123@cys -q "
CREATE TABLE db_cfx.t_data_user_${type}_aggregates_model (
    date Date,
    channel String,
    dev_brand String,
    dev_model String,
    unique_oaids_state AggregateFunction(uniqCombined, String)
) ENGINE = AggregatingMergeTree()
PARTITION BY date
ORDER BY (date,channel,dev_brand,dev_model)
TTL date + toIntervalDay(7) TO VOLUME 'cold_volume', date + toIntervalDay(31)
SETTINGS index_granularity = 8192, merge_with_ttl_timeout = 86400, storage_policy = 'hot_cold_policy'
"
echo "新建db_cfx.t_data_user_${type}_aggregates_model表完成..."
# Backfill one day at a time, from today (dayNum=0) back to 30 days ago.
for dayNum in {0..30}; do
  echo "开始插入 dayNum = $dayNum 天前的数据..."

  # Test the client's exit status directly rather than via a separate $? check.
  if clickhouse-client --password Keepgo123@cys -q "
    INSERT INTO db_cfx.t_data_user_${type}_aggregates_model
    SELECT
        activate_date as date,
        channel as channel,
        dev_brand as dev_brand,
        dev_model as dev_model,
        uniqCombinedState(oaid) as unique_oaids_state
    FROM db_cfx.t_data_user_${type}_active_final
    WHERE activate_date = today() - INTERVAL $dayNum DAY  -- 计算 today() 减去 xxx 天
    GROUP BY date, channel, dev_brand,dev_model;
  "; then
    echo "dayNum = $dayNum 天前的数据插入成功"
  else
    # Diagnostics to stderr; the loop deliberately keeps going on failure.
    echo "Error: dayNum = $dayNum 天前的数据插入失败" >&2
    # Uncomment to abort on the first failed day:
    # exit 1
  fi

  sleep 3  # throttle between per-day inserts

  echo "------------------------"
done

echo "db_cfx.t_data_user_${type}_aggregates_model表所有数据插入完成..."