{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "db5bcfc2-7b64-4b82-ab32-388e5a612dd9",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "25/09/11 11:05:06 WARN Utils: Your hostname, dobdeMacBook-Pro.local resolves to a loopback address: 127.0.0.1; using 192.168.12.18 instead (on interface en0)\n",
      "25/09/11 11:05:06 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address\n",
      "25/09/11 11:05:07 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable\n",
      "Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties\n",
      "Setting default log level to \"WARN\".\n",
      "To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).\n"
     ]
    }
   ],
   "source": [
    "# If pyspark is missing, install it first: pip install pyspark\n",
    "from pyspark.sql import SparkSession\n",
    "\n",
    "# Parenthesized chaining avoids fragile backslash line continuations.\n",
    "spark = (\n",
    "    SparkSession.builder\n",
    "    .appName(\"tt20250911\")\n",
    "    .getOrCreate()\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "2661eafc-0467-4661-bfad-5f52e197a886",
   "metadata": {},
   "outputs": [
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "[Stage 0:>                                                          (0 + 5) / 5]"
     ]
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "+--------------------+--------+----------+\n",
      "|                gaid|is_valid|is_invalid|\n",
      "+--------------------+--------+----------+\n",
      "|83a3783c-ba6e-47b...|    true|     false|\n",
      "|    [advertising_id]|   false|      true|\n",
      "|      advertising-id|    true|     false|\n",
      "|00000000–0000–000...|   false|      true|\n",
      "|ad1d5557-0be1-424...|    true|     false|\n",
      "+--------------------+--------+----------+\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "                                                                                "
     ]
    }
   ],
   "source": [
    "# NOTE: a raw string is required here. In a normal Python string the\n",
    "# two-character sequence backslash-1 is an octal escape (chr(1)), which\n",
    "# silently corrupts the regex backreference in the rlike patterns and\n",
    "# makes the negative lookahead dead code. With a raw string, Python\n",
    "# passes the doubled backslash through; Spark's SQL string parser then\n",
    "# unescapes it into the regex backreference to group 1.\n",
    "sql = r\"\"\"\n",
    "with t_data as (\n",
    "select '83a3783c-ba6e-47bf-b104-f16b831ad201' gaid\n",
    "union all\n",
    "select '[advertising_id]' gaid\n",
    "union all\n",
    "select 'advertising-id' gaid\n",
    "union all\n",
    "select '00000000–0000–0000–0000–000000000000' gaid\n",
    "union all\n",
    "select 'ad1d5557-0be1-424d-8d96-a32811111119' gaid\n",
    ")\n",
    "select gaid \n",
    ",gaid rlike '^(?!.*([a-zA-Z0-9-])\\\\1{99})[a-zA-Z0-9-]{10,}$' AS is_valid\n",
    ",gaid not rlike '^(?!.*([a-zA-Z0-9-])\\\\1{99})[a-zA-Z0-9-]{10,}$' AS is_invalid\n",
    "from t_data\n",
    "\"\"\"\n",
    "df = spark.sql(sql)\n",
    "df.show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "9be91150-f607-44c5-8def-e2118e0c90cc",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "+--------+\n",
      "|is_valid|\n",
      "+--------+\n",
      "|    true|\n",
      "|   false|\n",
      "+--------+\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "25/09/10 16:10:16 WARN HeartbeatReceiver: Removing executor driver with no recent heartbeats: 231582 ms exceeds timeout 120000 ms\n",
      "25/09/10 16:10:16 WARN SparkContext: Killing executors is not supported by current scheduler.\n"
     ]
    }
   ],
   "source": [
    "# Escaping demo: in the first pattern Python collapses backslash-1 into\n",
    "# chr(1) (octal escape), so no backreference reaches Spark and the\n",
    "# lookahead never fires -> true. In the second pattern the doubled form\n",
    "# survives as a real backreference, so the run of repeated '1's triggers\n",
    "# the lookahead -> false.\n",
    "# .show() returns None, so its result is deliberately not bound to df.\n",
    "spark.sql(\"\"\"\n",
    "select 'ad1d5557-0be1-424d-8d96-a32811111119' rlike '^(?!.*([a-zA-Z0-9-])\\1{7})[a-zA-Z0-9-]{10,}$' AS is_valid\n",
    "union all\n",
    "select 'ad1d5557-0be1-424d-8d96-a32811111119' rlike '^(?!.*([a-zA-Z0-9-])\\\\1{7})[a-zA-Z0-9-]{10,}$' AS is_valid\n",
    "\"\"\").show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "id": "a4c4c18d-ced5-4aeb-8371-db10cedd7805",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "+---+------+\n",
      "|age|   arr|\n",
      "+---+------+\n",
      "| 20|[b, a]|\n",
      "| 30|[d, e]|\n",
      "+---+------+\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Dedupe gaid within each age: row_number tags the first occurrence per\n",
    "# (age, gaid); the CASE maps the rest to NULL, and collect_list drops\n",
    "# NULL entries (note the NULL-gaid row is absent from the result).\n",
    "dedupe_sql = \"\"\"\n",
    "with t_data0 as (\n",
    "    select 'a' gaid,20 age\n",
    "    union all\n",
    "    select 'a' gaid,20 age\n",
    "    union all\n",
    "    select 'b' gaid,20 age\n",
    "    union all\n",
    "    select 'a' gaid,20 age\n",
    "    union all\n",
    "    select 'd' gaid,30 age\n",
    "    union all\n",
    "    select 'e' gaid, 30 age\n",
    "    union all\n",
    "    select null gaid, 30 age\n",
    "),\n",
    "t_data as (\n",
    "    select *,row_number() over (partition by age,gaid order by 1) rn\n",
    "    from t_data0\n",
    ")\n",
    "select \n",
    "    age,\n",
    "      collect_list(case when rn=1 then gaid else null end) arr\n",
    "from t_data group by 1\n",
    "\"\"\"\n",
    "spark.sql(dedupe_sql).show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 19,
   "id": "76d55b9e-a70e-4385-9ba3-b8e56c01dc1d",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "+----------------+\n",
      "|       user_json|\n",
      "+----------------+\n",
      "|{\"k\":20,\"v\":\"a\"}|\n",
      "|{\"k\":20,\"v\":\"b\"}|\n",
      "|{\"k\":30,\"v\":\"d\"}|\n",
      "|{\"k\":30,\"v\":\"e\"}|\n",
      "|        {\"k\":30}|\n",
      "+----------------+\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# Serialize each row to a JSON object: the struct field aliases (k, v)\n",
    "# become the JSON keys, and NULL fields are omitted from the output\n",
    "# (the NULL-gaid row renders as just {\"k\":30}).\n",
    "json_sql = \"\"\"\n",
    "with t_data as (\n",
    "    select 'a' gaid,20 age\n",
    "    union all\n",
    "    select 'b' gaid,20 age\n",
    "    union all\n",
    "    select 'd' gaid,30 age\n",
    "    union all\n",
    "    select 'e' gaid, 30 age\n",
    "    union all\n",
    "    select null gaid, 30 age\n",
    ")\n",
    "SELECT \n",
    "  to_json(\n",
    "    struct(\n",
    "      age AS k,  -- 指定 JSON 中字段名\n",
    "      gaid AS v  -- 指定 JSON 中字段名\n",
    "    )\n",
    "  ) AS user_json\n",
    "FROM t_data\n",
    "\"\"\"\n",
    "spark.sql(json_sql).show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
   "id": "e445d307-eef7-4ebf-b90a-578b97e919cb",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "+----+----------------------+\n",
      "|gaid|distinct_combined_list|\n",
      "+----+----------------------+\n",
      "|   b|  [{\"k\":100}, {\"k\":...|\n",
      "|   a|  [{\"k\":100}, {\"k\":...|\n",
      "+----+----------------------+\n",
      "\n"
     ]
    }
   ],
   "source": [
    "# FLAWED first attempt, kept for comparison: the CASE emits only age when\n",
    "# a row has BOTH age and score non-null, so score=100 is silently dropped\n",
    "# here. The recorded output predates the current test data (it shows gaids\n",
    "# 'a' and 'b', but only 'a' exists below). The next cell fixes this with\n",
    "# array + filter + explode.\n",
    "spark.sql(\"\"\"\n",
    "with t_data as (\n",
    "    select 'a' gaid,20 age,null score\n",
    "    union all\n",
    "    select 'a' gaid,20 age,100 score\n",
    "    union all\n",
    "    select 'a' gaid,null age,null score\n",
    "    union all\n",
    "    select 'a' gaid,20 age,null score  -- 与第一条重复，用于测试去重\n",
    ")\n",
    "SELECT \n",
    "  gaid,\n",
    "  collect_set(\n",
    "      case \n",
    "      when age is not null then to_json(struct(age AS k))\n",
    "      when score is not null then to_json(struct(score AS k))\n",
    "    end\n",
    "    ) AS distinct_combined_list  -- 使用 collect_set 自动去重\n",
    "FROM t_data\n",
    "group by gaid\n",
    "\"\"\").show()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 35,
   "id": "80c0b7c0-e90e-4446-b2e8-cb06e4bfa7f9",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "+----+----------------------+\n",
      "|gaid|distinct_combined_list|\n",
      "+----+----------------------+\n",
      "|   a| [{\"k\":100}, {\"k\":20}]|\n",
      "+----+----------------------+\n",
      "\n"
     ]
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "25/09/11 21:30:53 WARN HeartbeatReceiver: Removing executor driver with no recent heartbeats: 938271 ms exceeds timeout 120000 ms\n",
      "25/09/11 21:30:53 WARN SparkContext: Killing executors is not supported by current scheduler.\n"
     ]
    }
   ],
   "source": [
    "# Correct version: build one JSON element per non-null column, drop NULLs\n",
    "# with filter, explode into rows, then collect_set deduplicates -- so both\n",
    "# {\"k\":20} and {\"k\":100} are kept for gaid 'a'.\n",
    "result_sql = \"\"\"\n",
    "with t_data as (\n",
    "    select 'a' gaid,20 age,null score\n",
    "    union all\n",
    "    select 'a' gaid,20 age,100 score  -- 同时有age和score\n",
    "    union all\n",
    "    select 'a' gaid,null age,null score  -- 无效数据\n",
    "    union all\n",
    "    select 'a' gaid,20 age,null score  -- 重复数据\n",
    "),\n",
    "-- 步骤1：将每行的age和score拆分为独立的JSON元素\n",
    "split_data as (\n",
    "    select \n",
    "        gaid,\n",
    "        -- 生成包含age和score的数组（过滤null值）\n",
    "        filter(\n",
    "            array(\n",
    "                if(age is not null, to_json(struct(age as k)), null),\n",
    "                if(score is not null, to_json(struct(score as k)), null)\n",
    "            ), \n",
    "            x -> x is not null  -- 过滤掉null元素\n",
    "        ) as json_array\n",
    "    from t_data\n",
    "),\n",
    "-- 步骤2：将数组拆分为多行（一行一个JSON元素）\n",
    "exploded_data as (\n",
    "    select \n",
    "        gaid,\n",
    "        explode(json_array) as json_element  -- 拆分数组\n",
    "    from split_data\n",
    "    where size(json_array) > 0  -- 排除空数组\n",
    ")\n",
    "-- 步骤3：聚合去重\n",
    "select \n",
    "    gaid,\n",
    "    collect_set(json_element) as distinct_combined_list\n",
    "from exploded_data\n",
    "group by gaid\n",
    "\"\"\"\n",
    "spark.sql(result_sql).show(truncate=1000, vertical=False)\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "id": "ffbd0a8a-a31f-4184-b7dd-97c4b6ddb267",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "60"
      ]
     },
     "execution_count": 20,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "4 * 15"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 21,
   "id": "8b6f1b15-f8c8-413a-8926-8958338ca840",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "4.615384615384615"
      ]
     },
     "execution_count": 21,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "60 / 13"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 22,
   "id": "b31c3115-eb11-434f-99de-ad6429881f84",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "4.8"
      ]
     },
     "execution_count": 22,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "4 * 1.2"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 23,
   "id": "cd52e339-e659-495b-8164-5314a45cb6c1",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "5.2"
      ]
     },
     "execution_count": 23,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "4 * 1.3"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 24,
   "id": "181c9dc6-41aa-4bbc-8bc5-84e64818590a",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "1.25"
      ]
     },
     "execution_count": 24,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "5 / 4.0"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 25,
   "id": "2bb3efef-29c1-473b-87f2-7a22d01632f3",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "65"
      ]
     },
     "execution_count": 25,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "5 * 13"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "63911577-37dd-44f8-9246-cbe49759849d",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.13.2"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
