{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "815091a3-4c41-4e24-bb34-0eafea277289",
   "metadata": {},
   "outputs": [],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "75c35045-9b5a-4adf-9c1b-4aa0c0cadbbe",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Starting Spark application\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "<table>\n",
       "<tr><th>ID</th><th>YARN Application ID</th><th>Kind</th><th>State</th><th>Spark UI</th><th>Driver log</th><th>User</th><th>Current session?</th></tr><tr><td>543</td><td>application_1710829601063_75069</td><td>pyspark</td><td>idle</td><td><a target=\"_blank\" href=\"https://nfplmxglx134:26001/proxy/application_1710829601063_75069/\">Link</a></td><td><a target=\"_blank\" href=\"https://nfplmxglx139:26010/node/containerlogs/container_e15_1710829601063_75069_01_000001/mxgl_gsyw_sxywtxcs\">Link</a></td><td>None</td><td>✔</td></tr></table>"
      ],
      "text/plain": [
       "<IPython.core.display.HTML object>"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "f60a2896cb1b4f028dfae6e98f69990c",
       "version_major": 2,
       "version_minor": 0
      },
      "text/html": [
       "<p>Failed to display Jupyter Widget of type <code>FloatProgress</code>.</p>\n",
       "<p>\n",
       "  If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
       "  that the widgets JavaScript is still loading. If this message persists, it\n",
       "  likely means that the widgets JavaScript library is either not installed or\n",
       "  not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
       "  Widgets Documentation</a> for setup instructions.\n",
       "</p>\n",
       "<p>\n",
       "  If you're reading this message in another frontend (for example, a static\n",
       "  rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
       "  it may mean that your frontend doesn't currently support widgets.\n",
       "</p>\n"
      ],
      "text/plain": [
       "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'), max=1.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "SparkSession available as 'spark'.\n"
     ]
    },
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "7ecb579d1736495a9422a2f7c2566ac3",
       "version_major": 2,
       "version_minor": 0
      },
      "text/html": [
       "<p>Failed to display Jupyter Widget of type <code>FloatProgress</code>.</p>\n",
       "<p>\n",
       "  If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
       "  that the widgets JavaScript is still loading. If this message persists, it\n",
       "  likely means that the widgets JavaScript library is either not installed or\n",
       "  not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
       "  Widgets Documentation</a> for setup instructions.\n",
       "</p>\n",
       "<p>\n",
       "  If you're reading this message in another frontend (for example, a static\n",
       "  rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
       "  it may mean that your frontend doesn't currently support widgets.\n",
       "</p>\n"
      ],
      "text/plain": [
       "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'), max=1.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "DataFrame[]"
     ]
    }
   ],
   "source": [
    "s = spark\n",
    "spark.sql('use mxgl_gsyw_sxywtxcs')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "id": "ce4e2596-246c-4988-a190-baf9580cdb4f",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "96b8edca8d6b417e9c274693e303136c",
       "version_major": 2,
       "version_minor": 0
      },
      "text/html": [
       "<p>Failed to display Jupyter Widget of type <code>FloatProgress</code>.</p>\n",
       "<p>\n",
       "  If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
       "  that the widgets JavaScript is still loading. If this message persists, it\n",
       "  likely means that the widgets JavaScript library is either not installed or\n",
       "  not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
       "  Widgets Documentation</a> for setup instructions.\n",
       "</p>\n",
       "<p>\n",
       "  If you're reading this message in another frontend (for example, a static\n",
       "  rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
       "  it may mean that your frontend doesn't currently support widgets.\n",
       "</p>\n"
      ],
      "text/plain": [
       "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'), max=1.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    }
   ],
   "source": [
    "# time1 = \"2020-01-23\"\n",
    "# time2 = '2024-12-31'\n",
    "time1 = \"2022-01-01\"\n",
    "time2 = '2023-01-01'\n",
    "year = time1[0:4]\n",
    "#是否有环境污染负面舆情   -- 有问题\n",
    "hjwrfmyq = f'''\n",
    "select distinct companyname   as custname,1 as hjwrfmyq \n",
    "from fh_stg_news\n",
    "where (partytitle rlike '被列入环境信用失信|被列入环境信用警示|生态环境部黑榜|被生态环境部拉黑|污染环境案|因污染被点名|违反国家大气污染防治法|噪声污染|污染事件|环境污染'\n",
    "or partytitle like '%废气%污染%' \n",
    "or partytitle like '%扬尘%污染%'\n",
    "or partytitle like '%污染%案%'\n",
    "or partytitle like '%污染物%超标%'\n",
    "or partytitle like '%被列入%污染重点监管单位名单')\n",
    "and to_date(replace(sorttime,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "'''\n",
    "\n",
    "#是否曾发行绿债\n",
    "fxlz = f'''\n",
    "select  distinct qentname  as custname,1 as fxlz\n",
    "from ys_rst_zizhi_chinamoneybond \n",
    "where   to_date(replace(sdate,'/','-'))  between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "'''\n",
    "\n",
    "#受到劳动纠纷被诉次数\n",
    "sdldjfbscs = f'''\n",
    "select companyname  as custname,count(distinct companyname,caseno) as sdldjfbscs\n",
    "from \n",
    "fh_stg_ktgg\n",
    "where to_date(replace(sorttime,'/','-'))  between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "and body rlike '劳动合同纠纷|劳动争议' and body not rlike concat('原告.{{0,4}}',cast(companyname as string))\n",
    "and caseno is not null\n",
    "and caseno not in ('',' ') \n",
    "group by companyname\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "\n",
    "#女性高管占比\n",
    "\n",
    "nxggzb = f'''\n",
    "select qentname  as custname,\n",
    "(sum(case when gender = '女' then 1 else 0 end)/count(gender)) * 100  as nxggzb\n",
    "from (select distinct * from ys_rst_zizhi_stockmanagers) T\n",
    "where gender is not null and \n",
    "\n",
    "(enddate between    to_date(\"{time1}\") and to_date(\"{time2}\")    or startdate between to_date(\"{time1}\")  and  to_date(\"{time2}\") )\n",
    "\n",
    "group by qentname\n",
    "\n",
    "'''\n",
    "\n",
    "#社保是否发生欠缴费\n",
    "\n",
    "sbsffsqjf = f'''\n",
    "select distinct qentname  as custname,1 as sbsffsqjf\n",
    "from ys_rst_deep_yearreportsocsecs \n",
    "where cast( unpaidsocialinsso110 as decimal(10,2)) > 0 \n",
    "or  cast( unpaidsocialinsso210 as decimal(10,2)) > 0 \n",
    "or  cast( unpaidsocialinsso310 as decimal(10,2)) > 0\n",
    "or  cast( unpaidsocialinsso410 as decimal(10,2)) > 0\n",
    "or  cast( unpaidsocialinsso510 as decimal(10,2)) > 0\n",
    "'''\n",
    "\n",
    "\n",
    "#拥有著作权数量\n",
    "\n",
    "yyzzqsl = f'''\n",
    "select qentname  as custname,count(distinct qentname,anndate) as yyzzqsl\n",
    "from \n",
    "(\n",
    "select qentname,anndate\n",
    "from  ys_rst_zizhi_products\n",
    "union \n",
    "select qentname,anndate\n",
    "from ys_rst_zizhi_softwares\n",
    ") t \n",
    "where anndate between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "group by qentname\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "#技术创新领先企业\n",
    "#可以跑通，时间未加，body title partytile 都有内容确定选用字段  \n",
    "jscxlxqy = f'''\n",
    "select  distinct companyname  as custname,1 as jscxlxqy \n",
    "from fh_stg_news\n",
    "where  partytitle like '%被列入%专精特新%'\n",
    "or partytitle like '%被列入%瞪羚%'\n",
    "or partytitle like '%被列入%独角兽%'\n",
    "or partytitle like '%被列入%单项冠军%'\n",
    "or partytitle like '%被列入%隐形冠军%'\n",
    "or partytitle like '%被列入%创新型企业%'\n",
    "or partytitle like '%被列入%科技中小型企业%'\n",
    "or partytitle like '%被列入%科改示范型企业%'\n",
    "or partytitle like '%被列入%高新技术企业%'\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "#参与慈善捐赠等公益活动\n",
    "\n",
    "\n",
    "cycsjzdgyhd = f'''\n",
    "select distinct companyname  as custname,1 as cycsjzdgyhd\n",
    "from fh_stg_news\n",
    "where body rlike '慈善|公益|捐款|捐赠|扶贫'\n",
    "and to_date(replace(sorttime,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "'''\n",
    "\n",
    "\n",
    "#涉及民间借贷纠纷案件被诉次数\n",
    "\n",
    "mjjdbscs = f'''\n",
    "select companyname  as custname,count(distinct companyname,caseno) as mjjdbscs\n",
    "from \n",
    " fh_stg_ktgg\n",
    "where  to_date(replace(sorttime,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "and body rlike '民间借贷'  and body not rlike concat('原告.{{0,4}}',cast(companyname as string)) \n",
    "and caseno is not null\n",
    "and caseno not in ('',' ') \n",
    "group by companyname \n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "#股东出质股权次数\n",
    "\n",
    "czgqcs = f'''\n",
    "select qentname  as custname,count(qentname,stkpawndate) as czgqcs\n",
    "from ys_rst_deep_stockpawns\n",
    "where  to_date(replace(stkpawndate,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "and stkpawnstatus = '有效'\n",
    "group by qentname \n",
    "'''\n",
    "\n",
    "\n",
    "# 股权冻结/转让次数\n",
    "# 可以跑通，时间未加\n",
    "# gqdjzrcs = \n",
    "\n",
    "gqdjzrcs = f'''\n",
    "select qentname  as custname,count(distinct qentname,frofrom,froto) as gqdjzrcs\n",
    "from ys_rst_deep_judicialaiddetails\n",
    "where   (to_date(replace(frofrom,'/','-')) between    to_date(\"{time1}\") and to_date(\"{time2}\")    or to_date(replace( froto,'/','-')) between to_date(\"{time1}\")  and  to_date(\"{time2}\") )\n",
    "group by qentname \n",
    "'''\n",
    "\n",
    "#最新一期纳税信用等级\n",
    "#已跑通 \n",
    "nsxydj = f'''\n",
    "select \n",
    "companyname   as custname,\n",
    "min(case when eventresult like '%A%' then 1\n",
    "when eventresult like '%B%' then 2\n",
    "when eventresult like '%C%' then 3 \n",
    "when eventresult like '%D%' then 4\n",
    "when eventresult like '%M%' then 5 \n",
    "else null end ) as nsxydj\n",
    "\n",
    "from  fh_stg_satparty_xin\n",
    "where to_date(replace(sorttime,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "group by companyname\n",
    "'''\n",
    "\n",
    "#税务非正常户\n",
    "#可以跑通，时间未加\n",
    "swfzch = f'''\n",
    "select distinct companyname  as custname,1 as swfzch\n",
    "from fh_stg_satparty_fzc \n",
    "where to_date(replace(posttime,'/','-'))  between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "'''\n",
    "\n",
    "\n",
    "#是否欠税\n",
    "\n",
    "sfqs = f'''\n",
    "select distinct qentname  as custname,1 as sfqs\n",
    "from ys_rst_sifainfo_qsggents\n",
    "where  to_date(replace( pubtime,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "#经营异常次数\n",
    "#可以跑通，时间未加\n",
    "jyyccs = f'''\n",
    "select distinct qentname  as custname,count(distinct qentname,indate) as jyyccs\n",
    "from ys_rst_deep_exceptions\n",
    "where  to_date(replace( indate,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "group by qentname  \n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "#经营异常类型\n",
    "#写已跑通\n",
    "jyyclx = f'''\n",
    "\n",
    "select qentname   as custname,\n",
    "max(case when outreason > 0 then outreason else inreason end) as jyyclx\n",
    "from \n",
    "\n",
    "(\n",
    "select qentname ,\n",
    "case \n",
    " when inreason rlike '信息隐瞒|弄虚作假' then  3\n",
    "when inreason rlike '未依照.*公示|未按照.*公示' then 2 \n",
    "when inreason rlike '经营场所无法联系的' then 1 \n",
    "else null end as inreason,\n",
    "case when outreason rlike '严重违法' then 4 else null end as outreason\n",
    "from ys_rst_deep_exceptions \n",
    "where  to_date(replace( indate,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    ")\n",
    "t\n",
    "group by qentname\n",
    "'''\n",
    "\n",
    "# s.sql(jyyclx).show()\n",
    "#列入失信被执行\n",
    "#已跑通\n",
    "lrsxbzx = f'''\n",
    "select distinct qentname  as custname,1 as lrsxbzx\n",
    "from \n",
    "(\n",
    "select qentname,regdateclean\n",
    "from\n",
    "ys_rst_deep_punishbreaks \n",
    "union\n",
    "select qentname,fsxlasj as regdateclean\n",
    "from \n",
    "ys_rst_sifainfo_caselesscredits  \n",
    "union\n",
    "select qentname ,regdateclean\n",
    "from \n",
    "ys_rst_deep_punisheds   \n",
    ")\n",
    "\n",
    "where  to_date(replace( regdateclean,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "'''\n",
    "\n",
    "\n",
    "#动产抵押次数\n",
    "#可以跑通，时间未加\n",
    "dcdycs = f'''\n",
    "select qentname  as custname ,count(distinct mabregdate,qentname)  as dcdycs\n",
    "from ys_rst_deep_mortgagebasics\n",
    "where  to_date(replace( mabregdate,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "group by qentname\n",
    "'''\n",
    "\n",
    "#不动产抵押次数\n",
    "#可以跑通，时间未加\n",
    "bdcdycs = f'''\n",
    "select qentname  as custname ,count(distinct sdate,qentname)  as bdcdycs\n",
    "from ys_rst_zizhi_landmort \n",
    "where sdate between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "group by qentname \n",
    "'''\n",
    "\n",
    "#对外提供担保次数   -- 双向时间问题 \n",
    "dwdbcs = f''' \n",
    "select qentname   as custname,count(distinct pefperform,pefperto,qentname)  as dwdbcs\n",
    "from ys_rst_deep_yearreportforguarantees\n",
    "where( pefperform between  to_date(\"{time1}\")  and   to_date(\"{time2}\") ) or  (pefperto  between to_date(\"{time1}\")  and   to_date(\"{time2}\") )\n",
    "group by qentname\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "#其他行政处罚  \n",
    "\n",
    "qtxzcfje =  f''' \n",
    "\n",
    "select custname,\n",
    "case when qtxzcfje > 100000000 then qtxzcfje / 100\n",
    "when qtxzcfje  > 5000000  then qtxzcfje / 10   else qtxzcfje end as qtxzcfje\n",
    "from \n",
    "(\n",
    "select\n",
    "companyname  as custname,\n",
    "sum(coalesce(yuan,yuan1,wan,wan2,wan3,yuan2,yuan3,yuan4) + coalesce(moushou1,0))  as qtxzcfje\n",
    "from\n",
    "\n",
    "(\n",
    "select companyname,old,eventresult,\n",
    "cast(REGEXP_EXTRACT(eventresult,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan,\n",
    "cast(REGEXP_EXTRACT(eventresult,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)$',1) as decimal(10,2)) as yuan1,\n",
    "cast(REGEXP_EXTRACT(eventresult,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan,\n",
    "cast(REGEXP_EXTRACT(eventresult,'^([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan2,\n",
    "cast(REGEXP_EXTRACT(eventresult,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan3,\n",
    "cast(REGEXP_EXTRACT(eventresult,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(eventresult,'（￥?([0-9\\.]+)元?）',1) as decimal(10,2)) as yuan3,\n",
    "cast(REGEXP_EXTRACT(eventresult,'(^[0-9\\.]+$)',1) as decimal(10,2)) as yuan4,\n",
    "cast(REGEXP_EXTRACT(eventresult,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1\n",
    "\n",
    "from \n",
    "(\n",
    "select companyname,eventresult as old ,\n",
    "REPLACE(REPLACE(\n",
    "REPLACE(\n",
    "        REPLACE(\n",
    "                REPLACE(\n",
    "                        REPLACE(\n",
    "                                REPLACE(\n",
    "                                        REPLACE(\n",
    "                                                REPLACE(\n",
    "                                                        REPLACE(\n",
    "                                                                REPLACE(\n",
    "                                                                        REPLACE(\n",
    "                                                                                REPLACE(\n",
    "                                                                                        REPLACE(\n",
    "                                                                                                REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(eventresult,':',''),'壹','1'),'贰',2),'叁',3),'肆',4),'伍',5)\n",
    "                                                                                        ,'陆',6)\n",
    "                                                                                ,'柒',7)\n",
    "                                                                        ,'捌',8)\n",
    "                                                                ,'玖',9)\n",
    "                                                        ,'五',5)\n",
    "                                                ,'一',1)\n",
    "                                        ,'二',2)\n",
    "                                ,'三',3)\n",
    "                        ,'四',4)\n",
    "                ,'六',6)\n",
    "        ,'七',7)\n",
    ",'八',8)\n",
    ",'九',9),'玫',9) as eventresult\n",
    "from \n",
    "(\n",
    "\n",
    "select companyname,yjcode,max(eventresult) as eventresult from \n",
    "(select companyname,sorttime,replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(yjcode,'[',''),']',''),'﹝',''),'﹞',''),'【',''),'】',''),'(',''),')',''),'（',''),'）','') as yjcode,eventresult from fh_stg_credit_chufa) t \n",
    "where eventresult <> '' and eventresult is not null and to_date(replace( sorttime,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\") \n",
    "and yjcode is not null\n",
    "and yjcode not in ('',' ')\n",
    "group by companyname,yjcode\n",
    ") t\n",
    ") T\n",
    ") T2\n",
    "group by companyname\n",
    ") t \n",
    "'''\n",
    "#可以跑通，时间未加\n",
    "qtxzcfcs = f'''\n",
    "select companyname  as custname ,count(distinct yjcode,companyname) as qtxzcfcs\n",
    "from (select companyname,sorttime,replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(yjcode,'[',''),']',''),'﹝',''),'﹞',''),'【',''),'】',''),'(',''),')',''),'（',''),'）','') as yjcode,eventresult from fh_stg_credit_chufa) t\n",
    "where to_date(replace( sorttime,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "and yjcode is not null\n",
    "and yjcode not in ('',' ')\n",
    "group by companyname\n",
    "'''\n",
    "\n",
    "#TODO result or type\n",
    "#TODO case when 的优先级\n",
    "\n",
    "qtxzcflx = f'''\n",
    "select companyname  as custname,max(qtxzcflx) as qtxzcflx\n",
    "from \n",
    "(select companyname , \n",
    "case when eventresult rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when eventresult rlike '警告|通报批评' then 2\n",
    "    when eventresult rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when eventresult rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when eventresult rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when eventresult rlike '行政拘留' then 6 else null end as qtxzcflx\n",
    "from fh_stg_credit_chufa\n",
    "where to_date(replace( sorttime,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    ") t \n",
    "group by companyname\n",
    "'''\n",
    "\n",
    "# # 其他行政处罚 ys_rst_deep_entcasebaseinfos\n",
    "# qtxzcfje = f'''\n",
    "# select\n",
    "# qentname as custname,\n",
    "# sum(coalesce(yuan,yuan1,wan,wan2,wan3,yuan2,yuan3,yuan4) + coalesce(moushou1,0)) as qtxzcfje\n",
    "\n",
    "# from\n",
    "\n",
    "# (\n",
    "# select qentname,old,pencontent,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)$',1) as decimal(10,2)) as yuan1,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'^([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan2,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan3,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'（￥?([0-9\\.]+)元?）',1) as decimal(10,2)) as yuan3,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'(^[0-9\\.]+$)',1) as decimal(10,2)) as yuan4,\n",
    "# cast(REGEXP_EXTRACT(pencontent,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1\n",
    "\n",
    "# from \n",
    "# (\n",
    "# select qentname,pencontent as old ,\n",
    "# REPLACE(REPLACE(\n",
    "# REPLACE(\n",
    "#         REPLACE(\n",
    "#                 REPLACE(\n",
    "#                         REPLACE(\n",
    "#                                 REPLACE(\n",
    "#                                         REPLACE(\n",
    "#                                                 REPLACE(\n",
    "#                                                         REPLACE(\n",
    "#                                                                 REPLACE(\n",
    "#                                                                         REPLACE(\n",
    "#                                                                                 REPLACE(\n",
    "#                                                                                         REPLACE(\n",
    "#                                                                                                 REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(pencontent,':',''),'壹','1'),'贰',2),'叁',3),'肆',4),'伍',5)\n",
    "#                                                                                         ,'陆',6)\n",
    "#                                                                                 ,'柒',7)\n",
    "#                                                                         ,'捌',8)\n",
    "#                                                                 ,'玖',9)\n",
    "#                                                         ,'五',5)\n",
    "#                                                 ,'一',1)\n",
    "#                                         ,'二',2)\n",
    "#                                 ,'三',3)\n",
    "#                         ,'四',4)\n",
    "#                 ,'六',6)\n",
    "#         ,'七',7)\n",
    "# ,'八',8)\n",
    "# ,'九',9),'玫',9) as pencontent\n",
    "# from \n",
    "# (select qentname,pendecno,max(pencontent) as pencontent from \n",
    "# (\n",
    "\n",
    "\n",
    "# select qentname,pendecno,pencontent,pendecissdate  from ys_rst_deep_entcasebaseinfos\n",
    "# union\n",
    "# select companyname,yjcode ,eventresult,sorttime  from fh_stg_credit_chufa\n",
    "\n",
    "# ) t where pencontent <> '' and pencontent is not null and to_date(replace( pendecissdate,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "# group by qentname,pendecno\n",
    "# ) t\n",
    "# ) T\n",
    "# ) T2\n",
    "# group by qentname\n",
    "# '''\n",
    "\n",
    "# #可以跑通，时间未加\n",
    "# qtxzcfcs = f''' \n",
    "# select qentname  as custname,count(distinct pendecno,qentname) as qtxzcfcs\n",
    "# from \n",
    "# (\n",
    "\n",
    "\n",
    "# select qentname,pendecno,pencontent,pendecissdate  from ys_rst_deep_entcasebaseinfos\n",
    "# union\n",
    "# select companyname,yjcode ,eventresult,sorttime  from fh_stg_credit_chufa\n",
    "\n",
    "# ) t\n",
    "# where  to_date(replace( pendecissdate,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "# group by qentname \n",
    "#  '''\n",
    "\n",
    "# #可以跑通，时间未加\n",
    "# qtxzcflx = f'''\n",
    "\n",
    "# select qentname  as custname,max(qtxzcflx) as qtxzcflx\n",
    "# from \n",
    "# (select qentname , \n",
    "# case when pencontent rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "#     when pencontent rlike '警告|通报批评' then 2\n",
    "#     when pencontent rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "#     when pencontent rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "#     when pencontent rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "#     when pencontent rlike '行政拘留' then 6 else null end as qtxzcflx\n",
    "# from  (\n",
    "\n",
    "\n",
    "# select qentname,pendecno,pencontent,pendecissdate  from ys_rst_deep_entcasebaseinfos\n",
    "# union\n",
    "# select companyname,yjcode ,eventresult,sorttime  from fh_stg_credit_chufa\n",
    "\n",
    "# ) t\n",
    "# where to_date(replace( pendecissdate,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "# ) t \n",
    "# group by qentname\n",
    "\n",
    "# '''\n",
    "\n",
    "# qentname,pendecno,pencontent,pendecissdate  from ys_rst_deep_entcasebaseinfos\n",
    "# companyname,yjcode ,eventresult,sorttime  from fh_stg_credit_chufa\n",
    "\n",
    "\n",
    "\n",
    "#其他案件被诉次数\n",
    "#可以跑通，时间未加\n",
    "\n",
    "qtajbscs = f'''\n",
    "select companyname  as custname,count(distinct body,caseno) as qtajbscs\n",
    "from \n",
    "fh_stg_ktgg\n",
    "where   to_date(replace( sorttime,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "and body not rlike '劳动合同纠纷|劳动争议|民间借贷'   and body not rlike concat('原告.{{0,4}}',cast(companyname as string)) \n",
    "and caseno is not null\n",
    "and caseno not in ('',' ') \n",
    "group by companyname\n",
    "'''\n",
    "\n",
    "\n",
    "fsaqscsgyzcd = f''' \n",
    "select \n",
    "industryname   as custname,\n",
    "max(case when  cast(number_of_fatalities as decimal(10,2)) > 30 or cast(number_of_serious_injuries as decimal(10,2)) > 100 or  cast(economic_loss as decimal(10,2)) > 10000 then 4 \n",
    "when cast(number_of_fatalities as decimal(10,2)) between 10 and 30 or  cast(number_of_serious_injuries as decimal(10,2)) between  50 and 100 or   cast(economic_loss as decimal(10,2)) between 5000 and 10000 then 3 \n",
    "when cast(number_of_fatalities as decimal(10,2)) between 3 and 10 or  cast(number_of_serious_injuries as decimal(10,2)) between  10 and 50 or   cast(economic_loss as decimal(10,2)) between 1000 and 5000 then 2 \n",
    "else 1 end) as fsaqscsgyzcd\n",
    "from trans_c24041\n",
     "where to_date(replace(accident_time,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")\n",
    "group by industryname\n",
    "''' \n",
    "\n",
    "yzwfsx = f''' \n",
    "select  distinct qentname  as custname,1 as yzwfsx \n",
    "from ys_rst_shixin_data\n",
    "where to_date(indate) between to_date(\"{time1}\") and to_date(\"{time2}\") and (oudate is null or oudate = '' or oudate = ' ' )\n",
    "'''\n",
    "#  企业事故年份\t    \"year_of_enterprise_accidents\": \"2012\",\n",
    "# 事故时间\t    \"accident_time\": \"2012/4/8 0:00:00\",\n",
    "# 死亡人数\t    \"number_of_fatalities\": \"1\",\n",
    "# 企业信用代码\t    \"industrycode\": \"91340100149145267M\",\n",
    "# 经济损失\t    \"economic_loss\": \"0.0000\",\n",
    "# 重伤人数\t    \"number_of_serious_injuries\": \"0\"   (number_of_fatalities < 3 and number_of_fatalities >1 ) or (number_of_serious_injuries > 1 and number_of_serious_injuries < 10 ) or (economic_loss < 1000)\n",
    "\n",
    "# 未参评\n",
    "# 3人以下死亡，或10人以下重伤，或者1000万元以下直接经济损失的事故归于此分箱\n",
    "# 3人以上10人以下死亡，或者10人以上50人以下重伤，或者1000万元以上5000万元以下直接经济损失的事故\n",
    "# 10人以上30人以下死亡，或者50人以上100人以下重伤，或者5000万元以上1亿元以下直接经济损失的事故归于此分箱\n",
    "# 30人以上死亡，或者100人以上重伤，或者1亿元以上直接经济损失的事故归于此分箱\n",
    "\n",
    "\n",
    "#      to_date(replace( sorttime,'/','-'))   between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "# s.sql('').show(10)\n",
    "# s.sql('select release_time from trans_c24042 where release_time <> \"\" order by release_time desc ').show(10)\n",
    "# s.sql('select count( distinct *) from trans_c24042 ').show()\n",
    "# s.sql('select count( distinct *) from ys_rst_deep_entcasebaseinfos').show()\n",
    "# s.sql('select count(distinct qentname) from ys_rst_deep_entcasebaseinfos').show()\n",
    "# s.sql('select count( distinct industryname) from  trans_c24042').show()\n",
    "\n",
    "\n",
    "#是否型指标加distinct\n",
    " #环境处罚金额\n",
    "hjcfje = f'''\n",
    "\n",
    "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as hjcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result  from \n",
    "\n",
    "\n",
    "(\n",
    "\n",
    "select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty\n",
    "from \n",
    "trans_c24042 \n",
    "union \n",
    "select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty\n",
    "from trans_c24021\n",
    "\n",
    ") t1\n",
    "\n",
    "where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and  basis_for_penalty rlike'环境保护|污染环境|环境卫生|河道|施工现场|城市管理|垃圾' \n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "#环境处罚次数\n",
    "hjcfcs =  f'''\n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as hjcfcs\n",
    "from \n",
    "\n",
    "\n",
    "\n",
    "(\n",
    "\n",
    "select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time\n",
    "from \n",
    "trans_c24042 \n",
    "union \n",
    "select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time\n",
    "from trans_c24021\n",
    "\n",
    ") t1\n",
    "\n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "and  basis_for_penalty rlike'环境保护|污染环境|环境卫生|河道|施工现场|城市管理|垃圾' \n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "\n",
    "#环境处罚最严重的类型 环境监管记录 \n",
    "hjcfzyzlx = f'''\n",
    "select industryname as custname,max(hjcfzyzlx) as hjcfzyzlx\n",
    "from \n",
    "(select industryname,\n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as hjcfzyzlx\n",
    "from \n",
    "\n",
    "\n",
    "\n",
    "(\n",
    "\n",
    "select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time\n",
    "from \n",
    "trans_c24042 \n",
    "union \n",
    "select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time\n",
    "from trans_c24021\n",
    "\n",
    ") t1\n",
    "\n",
    "\n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "and  basis_for_penalty rlike'环境保护|污染环境|环境卫生|河道|施工现场|城市管理|垃圾' \n",
    ") t \n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# Whether listed as a production-restriction/suspension enterprise (sfblwxtcqy = 1).\n",
    "# Sources: trans_c24030 / trans_c24009 (restriction lists, incl. phased-out capacity),\n",
    "# excluding companies on the exemption list trans_c24036. A company qualifies when its\n",
    "# restriction period overlaps [time1, time2] (start OR end falls in the window).\n",
    "# Fix: AND binds tighter than OR in SQL, so the original\n",
    "#   not in (...) and (start-cond) or (end-cond)\n",
    "# bypassed the exemption filter whenever the end-time condition matched; the OR of the\n",
    "# two date-range checks is now explicitly parenthesized.\n",
    "sfblwxtcqy = f'''\n",
    "select distinct industryname as custname,1 as sfblwxtcqy\n",
    "from  \n",
    "(\n",
    "select industryname,start_time,end_time\n",
    "from \n",
    "trans_c24030 \n",
    "union \n",
    "select industryname,start_time,end_time\n",
    "from trans_c24009\n",
    "\n",
    ") t1\n",
    "\n",
    "where industryname not in (select industryname as a from trans_c24036 ) \n",
    "and ( (to_date(replace(start_time,'/','-')) between to_date(\"{time1}\") and  to_date(\"{time2}\")  ) or ( to_date(replace(end_time,'/','-'))   between to_date(\"{time1}\") and  to_date(\"{time2}\")) )\n",
    "\n",
    "'''\n",
    "\n",
    "# Number of pollutant-discharge rectification orders (pwxqzgcs).\n",
    "# Source: trans_c24023 (discharge-permit limited-period rectification). Counts distinct\n",
    "# (start, end, company) rectification periods whose start OR end falls in [time1, time2];\n",
    "# dates are stored with '/' separators, hence the replace() before to_date().\n",
    "\n",
    "\n",
    "pwxqzgcs = f''' \n",
    "select industryname as custname,count(distinct rectification_period_start_time,rectification_period_end_time,industryname) as pwxqzgcs\n",
    "from trans_c24023\n",
    "where \n",
    " (to_date(replace(rectification_period_start_time,'/','-')) between to_date(\"{time1}\") and  to_date(\"{time2}\")  ) or ( to_date(replace(rectification_period_end_time,'/','-'))   between to_date(\"{time1}\") and  to_date(\"{time2}\"))\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "\n",
    "# Whether the pollutant-discharge permit is still within its validity period\n",
    "# (pwxkzsfzyxqn = 1). Union of permit tables trans_c24032 and trans_c24011; a permit\n",
    "# counts as valid when its end date lies strictly after time2.\n",
    "# NOTE(review): the derived table at the lone ')' below has no alias; Spark SQL\n",
    "# auto-aliases unaliased subqueries, but Hive would reject this -- confirm the engine.\n",
    "pwxkzsfzyxqn =  f'''\n",
    "select distinct industryname as custname,1 as pwxkzsfzyxqn\n",
    "from \n",
    "\n",
    "(select industryname,validity_period_end_time\n",
    "from \n",
    "trans_c24032\n",
    "union\n",
    "select  industryname,validity_period_end_time\n",
    "from \n",
    "trans_c24011\n",
    "\n",
    ")\n",
    "\n",
    "where  to_date(replace(validity_period_end_time,'/','-')) > to_date(\"{time2}\") \n",
    "'''\n",
    "\n",
    "# Sudden-environmental-incident risk level (hjfxdj), per company maximum.\n",
    "# Mapping (ascending severity): 其他(other)=1, 一般(general)=2, 较大(considerable)=3,\n",
    "# 重大(major)=4. Sources: risk-enterprise lists trans_c24029 / trans_c24008.\n",
    "# NOTE(review): year_of_environmental_risk_enterprise_list is compared against\n",
    "# to_date(\"{year}\") -- this only works if the column parses as a date; confirm format.\n",
    "hjfxdj =  f'''\n",
    "\n",
    "select  industryname as custname,max(hjfxdj) as hjfxdj\n",
    "from \n",
    "(select industryname,\n",
    "    case when risk_level rlike '其他' Then 1 \n",
    "        when risk_level rlike '一般' then 2\n",
    "        when risk_level rlike '较大' then 3\n",
    "        when risk_level rlike '重大' then 4\n",
    "        else null end as hjfxdj\n",
    "from \n",
    "\n",
    "(\n",
    "\n",
    "select year_of_environmental_risk_enterprise_list,industryname,risk_level\n",
    "\n",
    "from \n",
    "trans_c24029\n",
    "union\n",
    "select year_of_environmental_risk_enterprise_list,industryname,risk_level\n",
    "from \n",
    "trans_c24008\n",
    "\n",
    "\n",
    ") t1\n",
    "\n",
    "where  year_of_environmental_risk_enterprise_list > to_date(\"{year}\")  \n",
    ") t\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# CO2 emissions over the reporting window (tpfl), from carbon-market enterprise data\n",
    "# (trans_c24037). Null emission values are treated as 0 so the sum stays non-null.\n",
    "# NOTE(review): carbon_data_year is compared against to_date(\"{year}\") -- confirm the\n",
    "# column actually parses as a date.\n",
    "\n",
    "tpfl = f'''\n",
    "select industryname as custname,sum(case when total_co2_emissions_from_all_units is null then 0 else total_co2_emissions_from_all_units end ) as tpfl\n",
    "from   trans_c24037\n",
    "where carbon_data_year > to_date(\"{year}\")\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "# Enterprise environmental credit rating grade (hjxypjdj), 1 (best) .. 7 (worst).\n",
    "# Rating text comes from trans_c24024 (announcement result, falling back to the\n",
    "# released result when the announcement is empty). `flag` marks companies appearing on\n",
    "# any key-pollutant / key-energy-consumption / carbon key-unit list (trans_c24026/05,\n",
    "# trans_c24027/06, trans_c24038/17) and bumps 蓝 -> 3 and 黄 -> 5.\n",
    "# Fix: a CASE expression returns the FIRST matching branch, so the plain '%蓝%'/'%黄%'\n",
    "# tests must come AFTER their `and flag = 1` variants -- in the original order the\n",
    "# flag branches (3 and 5) were unreachable. Branch order rearranged accordingly.\n",
    "hjxypjdj =f'''\n",
    "\n",
    "  \n",
    "  select t1.industryname as custname,\n",
    " \tmax(case when rating_result_announcement like '%绿%' then 1 \n",
    "      when rating_result_announcement like '%蓝%' and flag =1  then 3\n",
    "      when rating_result_announcement like '%蓝%' then 2 \n",
    "      when rating_result_announcement like '%黄%' and flag =1  then 5 \n",
    "      when rating_result_announcement like '%黄%' then 4 \n",
    "       when rating_result_announcement like '%红%' then 6 \n",
    "        when rating_result_announcement like '%黑%' then 7 end ) as  hjxypjdj\n",
    "from \n",
    "\n",
    "  \n",
    "( \n",
    "  select industryname, \n",
    "case when  rating_result_announcement is null or  rating_result_announcement = '' or rating_result_announcement = ' ' then rating_result_released\n",
    "else rating_result_announcement\n",
    "  end as rating_result_announcement \n",
    "from trans_c24024\n",
    "where enterprise_environmental_rating_year > to_date(\"{year}\") \n",
    ")\n",
    "t1 left join \n",
    "(\n",
    "\n",
    "select distinct industryname,1 as flag \n",
    "from \n",
    "\n",
    "(\n",
    "select industryname,year\n",
    "from \n",
    "trans_c24026 \n",
    "union\n",
    "select  industryname,year\n",
    "from \n",
    "trans_c24005\n",
    "\n",
    ") t1 \n",
    "where year > to_date(\"{year}\")\n",
    "union\n",
    "select distinct industryname,1 as flag \n",
    "from \n",
    "\n",
    "(\n",
    "select industryname,key_energy_consumption_year\n",
    "from \n",
    "trans_c24027 \n",
    "\n",
    "union\n",
    "select industryname,key_energy_consumption_year\n",
    "from \n",
    "trans_c24006\n",
    "\n",
    ") t11\n",
    "where key_energy_consumption_year > to_date(\"{year}\")\n",
    "union\n",
    "select distinct industryname,1 as flag \n",
    "from\n",
    "(\n",
    "\n",
    "select industryname,year_of_carbon_key_polluting_units\n",
    "from \n",
    "trans_c24038 \n",
    "union\n",
    "select industryname,year_of_carbon_key_polluting_units\n",
    "from \n",
    "trans_c24017\n",
    "\n",
    ") t12\n",
    "\n",
    "where year_of_carbon_key_polluting_units  > to_date(\"{year}\")\n",
    "\n",
    "\n",
    ") t2\n",
    "on t1.industryname = t2.industryname\n",
    "\n",
    "group by  t1.industryname\n",
    "\n",
    "\n",
    "'''\n",
    "\n",
    "# Enterprise emergency emission-reduction performance grade (yjjpjx), per company max.\n",
    "# Grades map control_type text to 1..10 (A=1 ... 长期停产/long-term shutdown=10).\n",
    "# Fix: CASE returns the first matching branch, and '引领性' is a substring of\n",
    "# '非引领性', so the original order ('%引领性%' -> 2 before '%非引领性%' -> 9) made\n",
    "# branch 9 unreachable: non-leading enterprises were graded 2. The more specific\n",
    "# '%非引领性%' test now comes first.\n",
    "yjjpjx = f'''\n",
    "\n",
    "select industryname as custname,max(yjjpjx) as yjjpjx\n",
    "from \n",
    "(\n",
    "select industryname , \n",
    "case when control_type like '%A%' then 1  \n",
    " when control_type like '%非引领性%' then 9\n",
    " when control_type like '%引领性%' then 2 \n",
    " when control_type like '%B-%' then 4\n",
    "  when control_type like '%B%' then 3 \n",
    "    when control_type like '%保障类%' then 5\n",
    "     when control_type like '%C%' then 6\n",
    "      when control_type like '%其他%' then 7\n",
    "       when control_type like '%D%' then 8\n",
    "         when control_type like '%长期停产%' then 10 \n",
    "         else null end as yjjpjx\n",
    "from \n",
    "\n",
    "(\n",
    "select industryname,control_type,year_of_enterprise_performance_grading\n",
    "from \n",
    "trans_c24031\n",
    "union\n",
    "select industryname,control_type,year_of_enterprise_performance_grading\n",
    "from \n",
    "trans_c24010\n",
    "\n",
    ")\n",
    "\n",
    "where year_of_enterprise_performance_grading  > to_date(\"{year}\")\n",
    ") t \n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# Whether listed as a positive-list enterprise (sfblwzmqdqy = 1).\n",
    "# Union of trans_c24034 / trans_c24013; qualifies when the control period overlaps\n",
    "# [time1, time2] (start OR end inside the window).\n",
    "# NOTE(review): the derived table below is unaliased (Spark auto-aliases, Hive would\n",
    "# reject), and `distinct` + `group by industryname` is redundant -- harmless but odd.\n",
    "\n",
    "sfblwzmqdqy = f'''\n",
    "select distinct industryname as custname,1 as sfblwzmqdqy\n",
    "from \n",
    "\n",
    "(\n",
    "\n",
    "select industryname,control_start_time,control_end_time\n",
    "from \n",
    "trans_c24034\n",
    "union\n",
    "\n",
    "select industryname,control_start_time,control_end_time\n",
    "from \n",
    "trans_c24013\n",
    "\n",
    ")\n",
    "\n",
    "where \n",
    "(to_date(replace(control_start_time,'/','-')) between to_date(\"{time1}\") and  to_date(\"{time2}\")  ) or ( to_date(replace(control_end_time,'/','-'))   between to_date(\"{time1}\") and  to_date(\"{time2}\"))\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# # Whether certified as an energy/water-saving & environmental-protection enterprise (sfsyjnjshbqy) -- TODO: source table name still unknown, query disabled below\n",
    "\n",
    "# sfsyjnjshbqy = f'''\n",
    "# select 关联企业名称,1 as sfsyjnjshbqy\n",
    "# from #缺表名\n",
    "# where (有效期至 > {} )\n",
    "# '''\n",
    "\n",
    "\n",
    "# Whether listed as a cleaner-production enterprise (sfblwqjscqy = 1).\n",
    "# Union of cleaner-production audit tables trans_c24007 / trans_c24028; membership is\n",
    "# not time-filtered here (no date predicate in these sources).\n",
    "# NOTE(review): the derived table is unaliased -- fine on Spark, rejected by Hive.\n",
    "\n",
    "sfblwqjscqy = f'''\n",
    "select distinct industryname as custname,1 as sfblwqjscqy\n",
    "from \n",
    "(\n",
    "select industryname\n",
    "from \n",
    "trans_c24007\n",
    "union\n",
    "select industryname\n",
    "from \n",
    "trans_c24028\n",
    "\n",
    ")\n",
    "'''\n",
    "\n",
    "\n",
    "# Fine amount for illegally obtained subsidies (wghdbtcfje), in yuan.\n",
    "# Pipeline: union penalty tables trans_c24042 / trans_c24021, de-duplicate per\n",
    "# (company, penalty document number) keeping max amount/result, then derive the amount:\n",
    "#   yuan     - structured fine field; presumably recorded in 万元, hence *10000 -- TODO confirm\n",
    "#   yuan2/3  - amounts parsed from the free-text penalty result after '处/处以' resp. '罚/罚款/人民币'\n",
    "#   moushou1 - confiscation amount parsed after '没收'\n",
    "# coalesce(yuan,yuan3,yuan2,0) prefers the structured field over the parsed fine, and the\n",
    "# confiscation is added on top. Regex braces are doubled ({{0,3}}) because the query is an\n",
    "# f-string. This variant filters on DataType rlike '价格|骗取' (pricing / fraud records).\n",
    "wghdbtcfje = f'''\n",
    "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) ) as wghdbtcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    "\n",
    "\n",
    "\n",
    "    (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "\n",
    "\n",
    "where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and  DataType rlike'价格|骗取'  and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "\n",
    "wghdbtcfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as wghdbtcfcs\n",
    "from \n",
    "\n",
    "    (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "\n",
    "\n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and  DataType rlike'价格|骗取'\n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "wghdbtcflx = f'''\n",
    "\n",
    "select industryname as custname,max(wghdbtcflx) as wghdbtcflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as wghdbtcflx\n",
    "from \n",
    "  (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'价格|骗取'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "# # Land-use penalties (tdlycfje amount / tdlycfcs count / tdlycflx severity) -- same union+dedup pattern as the subsidy-penalty queries, filtered on DataType rlike '土地管理'\n",
    "tdlycfje = f'''\n",
    "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as tdlycfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    "\n",
    "\n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "\n",
    "where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and  DataType rlike'土地管理'  \n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "\n",
    "tdlycfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as tdlycfcs\n",
    "from \n",
    "\n",
    "\n",
    "\n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "\n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and  DataType rlike'土地管理'\n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "tdlycflx = f'''\n",
    "\n",
    "select industryname as custname,max(tdlycflx) as tdlycflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike  '警告|通报批评' then 2\n",
    "    when penalty_result rlike  '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike  '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike  '行政拘留' then 6 else null end as tdlycflx\n",
    "from \n",
    "\n",
    "\n",
    "\n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "\n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\")  and  DataType rlike'土地管理'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "# Whether listed as a green-manufacturing enterprise (sfwlszzqy = 1).\n",
    "# Union of green-manufacturing list tables trans_c24014 / trans_c24035; membership is\n",
    "# not time-filtered (no date predicate in these sources).\n",
    "\n",
    "sfwlszzqy = f'''\n",
    "select distinct industryname as custname,1 as sfwlszzqy\n",
    "from\n",
    "\n",
    "(\n",
    "select industryname from \n",
    "trans_c24014\n",
    "union\n",
    "select industryname from \n",
    "trans_c24035\n",
    ") t\n",
    "'''\n",
    "\n",
    "\n",
    "# Occupational-health penalties (zyjkcfje amount / zyjkcfcs count / zyjkcflx severity) -- same pattern, filtered on DataType rlike '职业健康'\n",
    "zyjkcfje = f'''\n",
    "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as zyjkcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select  industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    "\n",
    "\n",
    "\n",
    "\n",
    "\n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "  \n",
    "\n",
    "\n",
    "where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and  DataType rlike'职业健康'\n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    "\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "zyjkcfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as zyjkcfcs\n",
    "from \n",
    "\n",
    "\n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "  \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'职业健康'\n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "zyjkcflx = f'''\n",
    "\n",
    "select industryname as custname,max(zyjkcflx) as zyjkcflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as zyjkcflx\n",
    "from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'职业健康'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "# Labor penalties (lgcfje amount / lgcfcs count / lgcflx severity) -- same pattern, filtered on trim(DataType) like '劳工保障' (exact match: no wildcards)\n",
    "lgcfje =f'''\n",
    "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as lgcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1   where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and trim(DataType) like'劳工保障'\n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "lgcfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as lgcfcs\n",
    "from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and  trim(DataType) like'劳工保障'\n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "lgcflx = f'''\n",
    "\n",
    "select industryname as custname,max(lgcflx) as lgcflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as lgcflx\n",
    "from (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and  trim(DataType) like'劳工保障'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "# Work-safety supervision penalties (aqjgje amount / aqjgcs count / aqjglx severity) -- same pattern, filtered on basis_for_penalty rlike '安全生产|安全管理|安全监督'\n",
    "aqjgje =  f'''\n",
    "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as aqjgje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    " (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1   where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and basis_for_penalty rlike'安全生产|安全管理|安全监督' \n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "aqjgcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as aqjgcs\n",
    "from \n",
    " (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and basis_for_penalty rlike'安全生产|安全管理|安全监督'\n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "aqjglx = f'''\n",
    "\n",
    "select industryname as custname,max(aqjglx) as aqjglx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as aqjglx\n",
    "from  (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and basis_for_penalty rlike'安全生产|安全管理|安全监督'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "# Fire-safety penalties (xfaqcfje amount / xfaqcfcs count / xfaqcflx severity) -- same pattern, filtered on DataType rlike '消防'\n",
    "xfaqcfje = f'''\n",
    "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as xfaqcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{{0,30}}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{{0,3}}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select  industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1   where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and DataType rlike'消防'   \n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# Fire-safety (消防) penalty count: distinct penalty documents per industry.\n",
    "# NOTE(review): this filters on release_time while the amount query above\n",
    "# filters on penalty_date -- confirm the difference is intentional.\n",
    "xfaqcfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as xfaqcfcs\n",
    "from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'消防' \n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "# Fire-safety penalty severity: worst (max) level per industry, mapped 1..6\n",
    "# (1 = other penalties ... 6 = administrative detention) from penalty_result.\n",
    "xfaqcflx = f'''\n",
    "\n",
    "select industryname as custname,max(xfaqcflx) as xfaqcflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as xfaqcflx\n",
    "from (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'消防' \n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "# Product-quality (产品质量) penalties; records matched via trim(DataType) like '产品质量'.\n",
    "# Penalty amount (yuan): structured fine (penalty_fine_amount, 万元 *10000) or,\n",
    "# when that is 0, amounts regex-extracted from penalty_result text; confiscated\n",
    "# (没收) amounts added on top. Inner group-by dedups per penalty document.\n",
    "# '{{' / '}}' are literal regex braces escaped for the f-string.\n",
    "# NOTE(review): filters on penalty_date while the count/severity queries below\n",
    "# use release_time -- confirm this difference is intentional.\n",
    "cpzlcfje =  f'''\n",
    "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as cpzlcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\D{{0,30}}([0-9\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result  from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1   where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and trim(DataType) like'产品质量'  \n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# Product-quality penalty count: distinct penalty documents per industry (by release_time).\n",
    "cpzlcfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as cpzlcfcs\n",
    "from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and trim(DataType) like'产品质量'  \n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "# Product-quality penalty severity: worst (max) level per industry, mapped 1..6\n",
    "# (1 = other penalties ... 6 = administrative detention) from penalty_result.\n",
    "cpzlcflx = f'''\n",
    "\n",
    "select industryname as custname,max(cpzlcflx) as cpzlcflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as cpzlcflx\n",
    "from (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and trim(DataType) like'产品质量' \n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "# Tax (税务) penalties; records matched via DataType rlike '税务'.\n",
    "# Penalty amount (yuan): structured fine (penalty_fine_amount, 万元 *10000) or,\n",
    "# when that is 0, amounts regex-extracted from penalty_result text; confiscated\n",
    "# (没收) amounts added on top. Inner group-by dedups per penalty document.\n",
    "# NOTE(review): filters on penalty_date while the count/severity queries below\n",
    "# use release_time -- confirm this difference is intentional.\n",
    "swcfje =f'''\n",
    "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as swcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\D{{0,30}}([0-9\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select   industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1   where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    " and DataType rlike'税务' \n",
    " and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    " group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# Tax penalty count: distinct penalty documents per industry (by release_time).\n",
    "swcfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as swcfcs\n",
    "from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'税务'\n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "# Tax penalty severity: worst (max) level per industry, mapped 1..6\n",
    "# (1 = other penalties ... 6 = administrative detention) from penalty_result.\n",
    "swcflx = f'''\n",
    "\n",
    "select industryname as custname,max(swcflx) as swcflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as swcflx\n",
    "from (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'税务'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "# Commercial-bribery / unfair-competition (商业贿赂及不正当竞争) penalties;\n",
    "# records matched via DataType rlike '贿赂|不正当|广告违规'.\n",
    "# Penalty amount (yuan): structured fine (penalty_fine_amount, 万元 *10000) or,\n",
    "# when that is 0, amounts regex-extracted from penalty_result text; confiscated\n",
    "# (没收) amounts added on top. Inner group-by dedups per penalty document.\n",
    "# Fix: exclude null/blank penalty_document_number, matching every sibling\n",
    "# *cfje amount query in this cell (otherwise blank document numbers collapse\n",
    "# into one group and inflate the sum).\n",
    "syhljbzdjzcfje =f'''\n",
    "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as syhljbzdjzcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\D{{0,30}}([0-9\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select  industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1   where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and DataType rlike'贿赂|不正当|广告违规' \n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# Bribery / unfair-competition penalty count: distinct penalty documents per\n",
    "# industry (by release_time).\n",
    "syhljbzdjzcfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as syhljbzdjzcfcs\n",
    "from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'贿赂|不正当|广告违规' \n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "# Bribery / unfair-competition penalty severity: worst (max) level per industry,\n",
    "# mapped 1..6 (1 = other penalties ... 6 = administrative detention).\n",
    "syhljbzdjzcflx = f'''\n",
    "\n",
    "select industryname as custname,max(syhljbzdjzcflx) as syhljbzdjzcflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as syhljbzdjzcflx\n",
    "from (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and DataType rlike'贿赂|不正当|广告违规'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "# Monopoly (垄断) penalties; records matched via DataType rlike '垄断'.\n",
    "# Penalty amount (yuan): structured fine (penalty_fine_amount, 万元 *10000) or,\n",
    "# when that is 0, amounts regex-extracted from penalty_result text; confiscated\n",
    "# (没收) amounts added on top. Inner group-by dedups per penalty document.\n",
    "# NOTE(review): filters on penalty_date while the count/severity queries below\n",
    "# use release_time -- confirm this difference is intentional.\n",
    "ldcfje = f'''\n",
    "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as ldcfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\D{{0,30}}([0-9\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select   industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    " (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1   where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and   DataType rlike'垄断' \n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# Monopoly penalty count: distinct penalty documents per industry (by release_time).\n",
    "ldcfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as ldcfcs\n",
    "from \n",
    " (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and  DataType rlike'垄断' \n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "# Monopoly penalty severity: worst (max) level per industry, mapped 1..6\n",
    "# (1 = other penalties ... 6 = administrative detention) from penalty_result.\n",
    "ldcflx = f'''\n",
    "\n",
    "select industryname as custname,max(ldcflx) as ldcflx\n",
    "from \n",
    "(select industryname , \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as ldcflx\n",
    "from  (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1 \n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and  DataType rlike'垄断'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "\n",
    "\n",
    "\n",
    "# Unlicensed-operation (无证经营) penalties; records match DataType rlike '许可'\n",
    "# while excluding the other penalty categories handled elsewhere in this cell.\n",
    "# Penalty amount (yuan): structured fine (penalty_fine_amount, 万元 *10000) or,\n",
    "# when that is 0, amounts regex-extracted from penalty_result text; confiscated\n",
    "# (没收) amounts added on top. Inner group-by dedups per penalty document.\n",
    "# NOTE(review): filters on penalty_date while the count/severity queries below\n",
    "# use release_time -- confirm this difference is intentional.\n",
    "wzjycfje = f'''\n",
    "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as wzjycfje\n",
    "from \n",
    "(\n",
    "select industryname,\n",
    "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'没收\D{{0,30}}([0-9\.]+)元',1) as decimal(10,2)) as moushou1,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan2,\n",
    "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\D{{0,3}}([0-9\.]+)元',1) as decimal(10,2)) as yuan3\n",
    "\n",
    "\n",
    "from \n",
    "(select   industryname,penalty_document_number,max(penalty_fine_amount) as penalty_fine_amount  ,max(penalty_result) as penalty_result   from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1  where to_date(replace(penalty_date,'/','-')) between to_date(\"{time1}\") and to_date(\"{time2}\")  \n",
    "and  DataType rlike'许可' and DataType not rlike'垄断|贿赂|不正当|广告违规|职业健康|土地管理|价格|骗取|信息披露' \n",
    "and penalty_document_number is not null\n",
    "and penalty_document_number not in ('',' ')\n",
    "group by industryname,penalty_document_number\n",
    ") t\n",
    ") T\n",
    "group by industryname\n",
    "'''\n",
    "\n",
    "# Unlicensed-operation penalty count: distinct penalty documents per industry\n",
    "# (by release_time).\n",
    "wzjycfcs = f''' \n",
    "select industryname as custname,count(distinct penalty_document_number,industryname) as wzjycfcs\n",
    "from \n",
    "(\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1\n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") \n",
    "and  DataType rlike'许可' and DataType not rlike'垄断|贿赂|不正当|广告违规|职业健康|土地管理|价格|骗取|信息披露'\n",
    "group by industryname \n",
    "'''\n",
    "\n",
    "# Unlicensed-operation penalty severity: worst (max) level per industry, mapped\n",
    "# 1..6 (1 = other penalties ... 6 = administrative detention) from penalty_result.\n",
    "wzjycflx = f'''\n",
    "\n",
    "select industryname as custname,max(wzjycflx) as wzjycflx\n",
    "from \n",
    "(select industryname, \n",
    "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
    "    when penalty_result rlike '警告|通报批评' then 2\n",
    "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
    "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
    "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
    "    when penalty_result rlike '行政拘留' then 6 else null end as wzjycflx\n",
    "from (\n",
    "\n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from \n",
    "    trans_c24042 \n",
    "    union \n",
    "    select industryname,penalty_document_number,penalty_fine_amount ,penalty_result,penalty_date,basis_for_penalty,release_time,DataType\n",
    "    from trans_c24021\n",
    "\n",
    "    ) t1\n",
    "where to_date(replace(release_time,'/','-')) between to_date(\"{time1}\")  and to_date(\"{time2}\") and  DataType rlike'许可' and DataType not rlike'垄断|贿赂|不正当|广告违规|职业健康|土地管理|价格|骗取|信息披露'\n",
    ") t \n",
    "group by industryname\n",
    "\n",
    "'''\n",
    "\n",
    "# Patent count per enterprise from ys_rst_zizhi_patents, filtered to the\n",
    "# [time1, time2] window on sqrq.\n",
    "# NOTE(review): sqh is presumably the patent application number and sqrq the\n",
    "# application date (申请号/申请日期) -- confirm against the table schema.\n",
    "yyzlsl = f''' \n",
    "\n",
    "select \n",
    "\n",
    "qentname as custname , count(distinct sqh) as yyzlsl\n",
    "from ys_rst_zizhi_patents\n",
    "where  to_date(sqrq)  between to_date(\"{time1}\")  and to_date(\"{time2}\")\n",
    "group by qentname \n",
    "\n",
    "''' \n",
    "\n",
    "\n",
    "\n",
    "# s.sql(yyzlsl).show(10)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "a7bf7d4b-716f-4809-b5b8-41f2dda2a0ed",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "8f2362b7f4a64c4e83ea386c95d79286",
       "version_major": 2,
       "version_minor": 0
      },
      "text/html": [
       "<p>Failed to display Jupyter Widget of type <code>FloatProgress</code>.</p>\n",
       "<p>\n",
       "  If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
       "  that the widgets JavaScript is still loading. If this message persists, it\n",
       "  likely means that the widgets JavaScript library is either not installed or\n",
       "  not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
       "  Widgets Documentation</a> for setup instructions.\n",
       "</p>\n",
       "<p>\n",
       "  If you're reading this message in another frontend (for example, a static\n",
       "  rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
       "  it may mean that your frontend doesn't currently support widgets.\n",
       "</p>\n"
      ],
      "text/plain": [
       "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'), max=1.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<pyspark.sql.session.SparkSession object at 0xfffce6b856d8>"
     ]
    }
   ],
   "source": []
  },
  {
   "cell_type": "code",
   "execution_count": 12,
   "id": "243f2af0-fae5-4482-94d9-04e3fd41f3c1",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "a4b2248bdf2f4f37b642edaafeac654d",
       "version_major": 2,
       "version_minor": 0
      },
      "text/html": [
       "<p>Failed to display Jupyter Widget of type <code>FloatProgress</code>.</p>\n",
       "<p>\n",
       "  If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
       "  that the widgets JavaScript is still loading. If this message persists, it\n",
       "  likely means that the widgets JavaScript library is either not installed or\n",
       "  not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
       "  Widgets Documentation</a> for setup instructions.\n",
       "</p>\n",
       "<p>\n",
       "  If you're reading this message in another frontend (for example, a static\n",
       "  rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
       "  it may mean that your frontend doesn't currently support widgets.\n",
       "</p>\n"
      ],
      "text/plain": [
       "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'), max=1.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stderr",
     "output_type": "stream",
     "text": [
      "An error was encountered:\n",
      "An error occurred while calling o1632.showString.\n",
      ": java.util.concurrent.ExecutionException: org.apache.spark.SparkException: Job aborted due to stage failure: \n",
      "Aborting TaskSet 806.0 because task 4 (partition 4)\n",
      "cannot run anywhere due to node and executor excludeOnFailure.\n",
      "Most recent failure:\n",
      "Lost task 4.1 in stage 806.0 (TID 54131) (nfplmxglx137 executor 2): java.util.regex.PatternSyntaxException: Dangling meta character '?' near index 0\n",
      "??\n",
      "^\n",
      "\tat java.util.regex.Pattern.error(Pattern.java:1969)\n",
      "\tat java.util.regex.Pattern.sequence(Pattern.java:2137)\n",
      "\tat java.util.regex.Pattern.expr(Pattern.java:2010)\n",
      "\tat java.util.regex.Pattern.compile(Pattern.java:1702)\n",
      "\tat java.util.regex.Pattern.<init>(Pattern.java:1352)\n",
      "\tat java.util.regex.Pattern.compile(Pattern.java:1028)\n",
      "\tat org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage189.init(Unknown Source)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.$anonfun$doExecute$4(WholeStageCodegenExec.scala:752)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.$anonfun$doExecute$4$adapted(WholeStageCodegenExec.scala:749)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$2(RDD.scala:914)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$2$adapted(RDD.scala:914)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.UnionRDD.compute(UnionRDD.scala:105)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)\n",
      "\tat org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)\n",
      "\tat org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)\n",
      "\tat org.apache.spark.scheduler.Task.run(Task.scala:131)\n",
      "\tat org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:516)\n",
      "\tat org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1604)\n",
      "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:519)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n",
      "\tat java.lang.Thread.run(Thread.java:750)\n",
      "\n",
      "\n",
      "ExcludeOnFailure behavior can be configured via spark.excludeOnFailure.*.\n",
      "\n",
      "\tat java.util.concurrent.FutureTask.report(FutureTask.java:122)\n",
      "\tat java.util.concurrent.FutureTask.get(FutureTask.java:206)\n",
      "\tat org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:515)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeBroadcast$1(SparkPlan.scala:193)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:189)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:203)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareRelation(BroadcastHashJoinExec.scala:219)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter(HashJoin.scala:503)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter$(HashJoin.scala:502)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume(HashJoin.scala:358)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume$(HashJoin.scala:355)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:87)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.consume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter(HashJoin.scala:548)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter$(HashJoin.scala:502)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume(HashJoin.scala:358)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume$(HashJoin.scala:355)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:87)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.consume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter(HashJoin.scala:548)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter$(HashJoin.scala:502)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume(HashJoin.scala:358)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume$(HashJoin.scala:355)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:87)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.consume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter(HashJoin.scala:548)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter$(HashJoin.scala:502)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume(HashJoin.scala:358)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume$(HashJoin.scala:355)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:87)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.doProduce(WholeStageCodegenExec.scala:483)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.doProduce$(WholeStageCodegenExec.scala:456)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:655)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:718)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.joins.SortMergeJoinExec.doExecute(SortMergeJoinExec.scala:133)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:525)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:453)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:452)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:50)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:746)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.joins.SortMergeJoinExec.doExecute(SortMergeJoinExec.scala:133)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:525)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:453)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:452)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:50)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:746)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:321)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:439)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:425)\n",
      "\tat org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:47)\n",
      "\tat org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3718)\n",
      "\tat org.apache.spark.sql.Dataset.$anonfun$head$1(Dataset.scala:2737)\n",
      "\tat org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3709)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:111)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:173)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:94)\n",
      "\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:781)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)\n",
      "\tat org.apache.spark.sql.Dataset.withAction(Dataset.scala:3707)\n",
      "\tat org.apache.spark.sql.Dataset.head(Dataset.scala:2737)\n",
      "\tat org.apache.spark.sql.Dataset.take(Dataset.scala:2944)\n",
      "\tat org.apache.spark.sql.Dataset.getRows(Dataset.scala:306)\n",
      "\tat org.apache.spark.sql.Dataset.showString(Dataset.scala:343)\n",
      "\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n",
      "\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n",
      "\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n",
      "\tat java.lang.reflect.Method.invoke(Method.java:498)\n",
      "\tat py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)\n",
      "\tat py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)\n",
      "\tat py4j.Gateway.invoke(Gateway.java:282)\n",
      "\tat py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)\n",
      "\tat py4j.commands.CallCommand.execute(CallCommand.java:79)\n",
      "\tat py4j.GatewayConnection.run(GatewayConnection.java:238)\n",
      "\tat java.lang.Thread.run(Thread.java:750)\n",
      "Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: \n",
      "Aborting TaskSet 806.0 because task 4 (partition 4)\n",
      "cannot run anywhere due to node and executor excludeOnFailure.\n",
      "Most recent failure:\n",
      "Lost task 4.1 in stage 806.0 (TID 54131) (nfplmxglx137 executor 2): java.util.regex.PatternSyntaxException: Dangling meta character '?' near index 0\n",
      "??\n",
      "^\n",
      "\tat java.util.regex.Pattern.error(Pattern.java:1969)\n",
      "\tat java.util.regex.Pattern.sequence(Pattern.java:2137)\n",
      "\tat java.util.regex.Pattern.expr(Pattern.java:2010)\n",
      "\tat java.util.regex.Pattern.compile(Pattern.java:1702)\n",
      "\tat java.util.regex.Pattern.<init>(Pattern.java:1352)\n",
      "\tat java.util.regex.Pattern.compile(Pattern.java:1028)\n",
      "\tat org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage189.init(Unknown Source)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.$anonfun$doExecute$4(WholeStageCodegenExec.scala:752)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.$anonfun$doExecute$4$adapted(WholeStageCodegenExec.scala:749)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$2(RDD.scala:914)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$2$adapted(RDD.scala:914)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.UnionRDD.compute(UnionRDD.scala:105)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)\n",
      "\tat org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)\n",
      "\tat org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)\n",
      "\tat org.apache.spark.scheduler.Task.run(Task.scala:131)\n",
      "\tat org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:516)\n",
      "\tat org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1604)\n",
      "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:519)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n",
      "\tat java.lang.Thread.run(Thread.java:750)\n",
      "\n",
      "\n",
      "ExcludeOnFailure behavior can be configured via spark.excludeOnFailure.*.\n",
      "\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2378)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2327)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2326)\n",
      "\tat scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)\n",
      "\tat scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)\n",
      "\tat scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2326)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1144)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1144)\n",
      "\tat scala.Option.foreach(Option.scala:407)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1144)\n",
      "\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2571)\n",
      "\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2507)\n",
      "\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2496)\n",
      "\tat org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:931)\n",
      "\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2341)\n",
      "\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2362)\n",
      "\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2381)\n",
      "\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2406)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1029)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)\n",
      "\tat org.apache.spark.rdd.RDD.withScope(RDD.scala:413)\n",
      "\tat org.apache.spark.rdd.RDD.collect(RDD.scala:1028)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeCollectIterator(SparkPlan.scala:397)\n",
      "\tat org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.$anonfun$relationFuture$1(BroadcastExchangeExec.scala:118)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$1(SQLExecution.scala:201)\n",
      "\tat java.util.concurrent.FutureTask.run(FutureTask.java:266)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n",
      "\t... 1 more\n",
      "\n",
      "Traceback (most recent call last):\n",
      "  File \"/srv/BigData/hadoop/data21/nm/localdir/usercache/mxgl_gsyw_sxywtxcs/appcache/application_1710829601063_74974/container_e15_1710829601063_74974_01_000001/pyspark.zip/pyspark/sql/dataframe.py\", line 485, in show\n",
      "    print(self._jdf.showString(n, 20, vertical))\n",
      "  File \"/srv/BigData/hadoop/data21/nm/localdir/usercache/mxgl_gsyw_sxywtxcs/appcache/application_1710829601063_74974/container_e15_1710829601063_74974_01_000001/py4j-0.10.9-src.zip/py4j/java_gateway.py\", line 1305, in __call__\n",
      "    answer, self.gateway_client, self.target_id, self.name)\n",
      "  File \"/srv/BigData/hadoop/data21/nm/localdir/usercache/mxgl_gsyw_sxywtxcs/appcache/application_1710829601063_74974/container_e15_1710829601063_74974_01_000001/pyspark.zip/pyspark/sql/utils.py\", line 111, in deco\n",
      "    return f(*a, **kw)\n",
      "  File \"/srv/BigData/hadoop/data21/nm/localdir/usercache/mxgl_gsyw_sxywtxcs/appcache/application_1710829601063_74974/container_e15_1710829601063_74974_01_000001/py4j-0.10.9-src.zip/py4j/protocol.py\", line 328, in get_return_value\n",
      "    format(target_id, \".\", name), value)\n",
      "py4j.protocol.Py4JJavaError: An error occurred while calling o1632.showString.\n",
      ": java.util.concurrent.ExecutionException: org.apache.spark.SparkException: Job aborted due to stage failure: \n",
      "Aborting TaskSet 806.0 because task 4 (partition 4)\n",
      "cannot run anywhere due to node and executor excludeOnFailure.\n",
      "Most recent failure:\n",
      "Lost task 4.1 in stage 806.0 (TID 54131) (nfplmxglx137 executor 2): java.util.regex.PatternSyntaxException: Dangling meta character '?' near index 0\n",
      "??\n",
      "^\n",
      "\tat java.util.regex.Pattern.error(Pattern.java:1969)\n",
      "\tat java.util.regex.Pattern.sequence(Pattern.java:2137)\n",
      "\tat java.util.regex.Pattern.expr(Pattern.java:2010)\n",
      "\tat java.util.regex.Pattern.compile(Pattern.java:1702)\n",
      "\tat java.util.regex.Pattern.<init>(Pattern.java:1352)\n",
      "\tat java.util.regex.Pattern.compile(Pattern.java:1028)\n",
      "\tat org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage189.init(Unknown Source)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.$anonfun$doExecute$4(WholeStageCodegenExec.scala:752)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.$anonfun$doExecute$4$adapted(WholeStageCodegenExec.scala:749)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$2(RDD.scala:914)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$2$adapted(RDD.scala:914)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.UnionRDD.compute(UnionRDD.scala:105)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)\n",
      "\tat org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)\n",
      "\tat org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)\n",
      "\tat org.apache.spark.scheduler.Task.run(Task.scala:131)\n",
      "\tat org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:516)\n",
      "\tat org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1604)\n",
      "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:519)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n",
      "\tat java.lang.Thread.run(Thread.java:750)\n",
      "\n",
      "\n",
      "ExcludeOnFailure behavior can be configured via spark.excludeOnFailure.*.\n",
      "\n",
      "\tat java.util.concurrent.FutureTask.report(FutureTask.java:122)\n",
      "\tat java.util.concurrent.FutureTask.get(FutureTask.java:206)\n",
      "\tat org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:515)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeBroadcast$1(SparkPlan.scala:193)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:189)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:203)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareRelation(BroadcastHashJoinExec.scala:219)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter(HashJoin.scala:503)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter$(HashJoin.scala:502)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume(HashJoin.scala:358)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume$(HashJoin.scala:355)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:87)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.consume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter(HashJoin.scala:548)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter$(HashJoin.scala:502)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume(HashJoin.scala:358)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume$(HashJoin.scala:355)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:87)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.consume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter(HashJoin.scala:548)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter$(HashJoin.scala:502)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume(HashJoin.scala:358)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume$(HashJoin.scala:355)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:87)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.consume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter(HashJoin.scala:548)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.codegenOuter$(HashJoin.scala:502)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume(HashJoin.scala:358)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doConsume$(HashJoin.scala:355)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:87)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume(WholeStageCodegenExec.scala:194)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.consume$(WholeStageCodegenExec.scala:149)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.doProduce(WholeStageCodegenExec.scala:483)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.doProduce$(WholeStageCodegenExec.scala:456)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce(HashJoin.scala:352)\n",
      "\tat org.apache.spark.sql.execution.joins.HashJoin.doProduce$(HashJoin.scala:351)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doProduce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.produce(BroadcastHashJoinExec.scala:40)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:54)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.$anonfun$produce$1(WholeStageCodegenExec.scala:95)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.CodegenSupport.produce$(WholeStageCodegenExec.scala:90)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:41)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:655)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:718)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.joins.SortMergeJoinExec.doExecute(SortMergeJoinExec.scala:133)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:525)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:453)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:452)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:50)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:746)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.joins.SortMergeJoinExec.doExecute(SortMergeJoinExec.scala:133)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.inputRDD(WholeStageCodegenExec.scala:525)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs(WholeStageCodegenExec.scala:453)\n",
      "\tat org.apache.spark.sql.execution.InputRDDCodegen.inputRDDs$(WholeStageCodegenExec.scala:452)\n",
      "\tat org.apache.spark.sql.execution.InputAdapter.inputRDDs(WholeStageCodegenExec.scala:496)\n",
      "\tat org.apache.spark.sql.execution.ProjectExec.inputRDDs(basicPhysicalOperators.scala:50)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:746)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$execute$1(SparkPlan.scala:180)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.$anonfun$executeQuery$1(SparkPlan.scala:218)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:215)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:176)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:321)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:439)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:425)\n",
      "\tat org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:47)\n",
      "\tat org.apache.spark.sql.Dataset.collectFromPlan(Dataset.scala:3718)\n",
      "\tat org.apache.spark.sql.Dataset.$anonfun$head$1(Dataset.scala:2737)\n",
      "\tat org.apache.spark.sql.Dataset.$anonfun$withAction$1(Dataset.scala:3709)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$5(SQLExecution.scala:111)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:173)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:94)\n",
      "\tat org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:781)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:68)\n",
      "\tat org.apache.spark.sql.Dataset.withAction(Dataset.scala:3707)\n",
      "\tat org.apache.spark.sql.Dataset.head(Dataset.scala:2737)\n",
      "\tat org.apache.spark.sql.Dataset.take(Dataset.scala:2944)\n",
      "\tat org.apache.spark.sql.Dataset.getRows(Dataset.scala:306)\n",
      "\tat org.apache.spark.sql.Dataset.showString(Dataset.scala:343)\n",
      "\tat sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)\n",
      "\tat sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)\n",
      "\tat sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)\n",
      "\tat java.lang.reflect.Method.invoke(Method.java:498)\n",
      "\tat py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)\n",
      "\tat py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)\n",
      "\tat py4j.Gateway.invoke(Gateway.java:282)\n",
      "\tat py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)\n",
      "\tat py4j.commands.CallCommand.execute(CallCommand.java:79)\n",
      "\tat py4j.GatewayConnection.run(GatewayConnection.java:238)\n",
      "\tat java.lang.Thread.run(Thread.java:750)\n",
      "Caused by: org.apache.spark.SparkException: Job aborted due to stage failure: \n",
      "Aborting TaskSet 806.0 because task 4 (partition 4)\n",
      "cannot run anywhere due to node and executor excludeOnFailure.\n",
      "Most recent failure:\n",
      "Lost task 4.1 in stage 806.0 (TID 54131) (nfplmxglx137 executor 2): java.util.regex.PatternSyntaxException: Dangling meta character '?' near index 0\n",
      "??\n",
      "^\n",
      "\tat java.util.regex.Pattern.error(Pattern.java:1969)\n",
      "\tat java.util.regex.Pattern.sequence(Pattern.java:2137)\n",
      "\tat java.util.regex.Pattern.expr(Pattern.java:2010)\n",
      "\tat java.util.regex.Pattern.compile(Pattern.java:1702)\n",
      "\tat java.util.regex.Pattern.<init>(Pattern.java:1352)\n",
      "\tat java.util.regex.Pattern.compile(Pattern.java:1028)\n",
      "\tat org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIteratorForCodegenStage189.init(Unknown Source)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.$anonfun$doExecute$4(WholeStageCodegenExec.scala:752)\n",
      "\tat org.apache.spark.sql.execution.WholeStageCodegenExec.$anonfun$doExecute$4$adapted(WholeStageCodegenExec.scala:749)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$2(RDD.scala:914)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$mapPartitionsWithIndex$2$adapted(RDD.scala:914)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.UnionRDD.compute(UnionRDD.scala:105)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:52)\n",
      "\tat org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:373)\n",
      "\tat org.apache.spark.rdd.RDD.iterator(RDD.scala:337)\n",
      "\tat org.apache.spark.shuffle.ShuffleWriteProcessor.write(ShuffleWriteProcessor.scala:59)\n",
      "\tat org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:99)\n",
      "\tat org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:52)\n",
      "\tat org.apache.spark.scheduler.Task.run(Task.scala:131)\n",
      "\tat org.apache.spark.executor.Executor$TaskRunner.$anonfun$run$3(Executor.scala:516)\n",
      "\tat org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1604)\n",
      "\tat org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:519)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n",
      "\tat java.lang.Thread.run(Thread.java:750)\n",
      "\n",
      "\n",
      "ExcludeOnFailure behavior can be configured via spark.excludeOnFailure.*.\n",
      "\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.failJobAndIndependentStages(DAGScheduler.scala:2378)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2(DAGScheduler.scala:2327)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$abortStage$2$adapted(DAGScheduler.scala:2326)\n",
      "\tat scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)\n",
      "\tat scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)\n",
      "\tat scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:2326)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1(DAGScheduler.scala:1144)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.$anonfun$handleTaskSetFailed$1$adapted(DAGScheduler.scala:1144)\n",
      "\tat scala.Option.foreach(Option.scala:407)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:1144)\n",
      "\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:2571)\n",
      "\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2507)\n",
      "\tat org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:2496)\n",
      "\tat org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:49)\n",
      "\tat org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:931)\n",
      "\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2341)\n",
      "\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2362)\n",
      "\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2381)\n",
      "\tat org.apache.spark.SparkContext.runJob(SparkContext.scala:2406)\n",
      "\tat org.apache.spark.rdd.RDD.$anonfun$collect$1(RDD.scala:1029)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)\n",
      "\tat org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)\n",
      "\tat org.apache.spark.rdd.RDD.withScope(RDD.scala:413)\n",
      "\tat org.apache.spark.rdd.RDD.collect(RDD.scala:1028)\n",
      "\tat org.apache.spark.sql.execution.SparkPlan.executeCollectIterator(SparkPlan.scala:397)\n",
      "\tat org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.$anonfun$relationFuture$1(BroadcastExchangeExec.scala:118)\n",
      "\tat org.apache.spark.sql.execution.SQLExecution$.$anonfun$withThreadLocalCaptured$1(SQLExecution.scala:201)\n",
      "\tat java.util.concurrent.FutureTask.run(FutureTask.java:266)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)\n",
      "\tat java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)\n",
      "\t... 1 more\n",
      "\n",
      "\n"
     ]
    }
   ],
   "source": [
    "import  pyspark.sql.functions as F\n",
    "\n",
    "lst = [hjcfzyzlx,hjcfcs,hjcfje,sfblwxtcqy,pwxqzgcs,pwxkzsfzyxqn,hjfxdj,tpfl,hjxypjdj,hjwrfmyq,yjjpjx,sfblwzmqdqy,sfblwqjscqy,wghdbtcflx,wghdbtcfcs,wghdbtcfje,tdlycflx,tdlycfcs,tdlycfje,sfwlszzqy,fxlz,zyjkcflx,zyjkcfcs,zyjkcfje,lgcflx,lgcfcs,lgcfje,sdldjfbscs,nxggzb,sbsffsqjf,fsaqscsgyzcd,aqjglx,aqjgcs,aqjgje,xfaqcflx,xfaqcfcs,xfaqcfje,cpzlcflx,cpzlcfcs,cpzlcfje,yyzzqsl,jscxlxqy,cycsjzdgyhd,mjjdbscs,czgqcs,gqdjzrcs,nsxydj,swcflx,swcfcs,swcfje,swfzch,sfqs,syhljbzdjzcflx,syhljbzdjzcfcs,syhljbzdjzcfje,ldcflx,ldcfcs,ldcfje,jyyccs,jyyclx,yzwfsx,wzjycfje,wzjycfcs,wzjycflx,dcdycs,bdcdycs,dwdbcs,lrsxbzx,qtxzcfje,qtxzcfcs,qtxzcflx,qtajbscs,yyzlsl]\n",
    "df = s.sql('select * from esg_name').distinct()\n",
    "for sql in lst:\n",
    "    # print(sql)\n",
    "    df2 = s.sql(sql)\n",
    "\n",
    "    df = df.join(df2,how='left',on='custname')\n",
    "\n",
    "# df.groupby('custname').agg(F.count('custname').alias('num')).orderBy(F.col('num').desc()).show()\n",
    "\n",
    "\n",
    "\n",
    "#qhfxblcd   1\n",
    "# jnjshbqy\n",
    "# hjxgrz\n",
    "# gykg 1\n",
    "# 金额去重 ，格兰数字单条合并 yz  双向时间修改\n",
    "# 改模型配置表\n",
    "#拉数据跑模型出分\n",
    "df.show()\n",
    "\n",
    "df = df.withColumn(\"jnjshbqy\",F.lit('0')).withColumn('hjxgrz',F.lit('0'))\n",
    "df.columns\n",
    "# df.show(5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 13,
   "id": "efcd86c9-5fa0-4472-97ea-23ebdd2dc971",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "320d5800775049d6a2df316d6058fbf8",
       "version_major": 2,
       "version_minor": 0
      },
      "text/html": [
       "<p>Failed to display Jupyter Widget of type <code>FloatProgress</code>.</p>\n",
       "<p>\n",
       "  If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
       "  that the widgets JavaScript is still loading. If this message persists, it\n",
       "  likely means that the widgets JavaScript library is either not installed or\n",
       "  not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
       "  Widgets Documentation</a> for setup instructions.\n",
       "</p>\n",
       "<p>\n",
       "  If you're reading this message in another frontend (for example, a static\n",
       "  rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
       "  it may mean that your frontend doesn't currently support widgets.\n",
       "</p>\n"
      ],
      "text/plain": [
       "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'), max=1.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "78"
     ]
    }
   ],
   "source": [
    "len(df.columns)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 64,
   "id": "8c60888f-06db-4bf5-a887-d94f5e108301",
   "metadata": {
    "collapsed": true,
    "jupyter": {
     "outputs_hidden": true
    },
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "930b6089862748158ef7934d9535370b",
       "version_major": 2,
       "version_minor": 0
      },
      "text/html": [
       "<p>Failed to display Jupyter Widget of type <code>FloatProgress</code>.</p>\n",
       "<p>\n",
       "  If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
       "  that the widgets JavaScript is still loading. If this message persists, it\n",
       "  likely means that the widgets JavaScript library is either not installed or\n",
       "  not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
       "  Widgets Documentation</a> for setup instructions.\n",
       "</p>\n",
       "<p>\n",
       "  If you're reading this message in another frontend (for example, a static\n",
       "  rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
       "  it may mean that your frontend doesn't currently support widgets.\n",
       "</p>\n"
      ],
      "text/plain": [
       "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'), max=1.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "\n",
      "select industryname as custname,max(hjcfzyzlx) as hjcfzyzlx\n",
      "from \n",
      "(select industryname,\n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as hjcfzyzlx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "and  basis_for_penalty rlike'环境保护|污染环境|环境卫生|河道|施工现场|城市管理|垃圾' \n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|        吉林省赫天物流有限公司|  1|\n",
      "|深圳市恒强伟业电子科技有限公司|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as hjcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "and  basis_for_penalty rlike'环境保护|污染环境|环境卫生|河道|施工现场|城市管理|垃圾' \n",
      "group by industryname \n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|        昆明明超电缆有限公司|  1|\n",
      "|福州工业园区开发集团有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as hjcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and  basis_for_penalty rlike'环境保护|污染环境|环境卫生|河道|施工现场|城市管理|垃圾' \n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+--------------------------------+---+\n",
      "|                        custname|num|\n",
      "+--------------------------------+---+\n",
      "|            山西二建集团有限公司|  1|\n",
      "|中国电子系统工程第二建设有限公司|  1|\n",
      "+--------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select distinct industryname as custname,1 as sfblwxtcqy\n",
      "from  trans_c24030 \n",
      "where industryname not in (select industryname as a from trans_c24036 ) \n",
      "and  to_date(replace(start_time,'/','-')) < to_date(\"2020-01-23\")  or  to_date(replace(end_time,'/','-')) >to_date(\"2024-12-31\")\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|河南易成瀚博能源科技有限公司|  1|\n",
      "|    西安科迅机械制造有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct rectification_period_start_time,rectification_period_end_time,industryname) as pwxqzgcs\n",
      "from trans_c24023\n",
      "where to_date(replace(rectification_period_start_time,'/','-')) > to_date(\"2020-01-23\")  or  to_date(replace(rectification_period_end_time,'/','-')) < to_date(\"2024-12-31\")\n",
      "group by industryname\n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|    云南盛川钢结构有限公司|  1|\n",
      "|山东新富瑞农业科技有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct industryname as custname,1 as pwxkzsfzyxqn\n",
      "from trans_c24032\n",
      "where  to_date(replace(validity_period_end_time,'/','-')) > to_date(\"2024-12-31\") \n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|    临海天宇药业有限公司|  1|\n",
      "|东北制药集团股份有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select  industryname as custname,max(hjfxdj) as hjfxdj\n",
      "from \n",
      "(select industryname,\n",
      "    case when risk_level rlike '其他' Then 1 \n",
      "        when risk_level rlike '一般' then 2\n",
      "        when risk_level rlike '较大' then 3\n",
      "        when risk_level rlike '重大' then 4\n",
      "        else null end as hjfxdj\n",
      "from trans_c24029\n",
      "where  year_of_environmental_risk_enterprise_list > to_date(\"2020\")  \n",
      ") t\n",
      "group by industryname\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|  浙江科力森化学有限公司|  1|\n",
      "|柳州五菱汽车工业有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname,sum(case when total_co2_emissions_from_all_units is null then 0 else total_co2_emissions_from_all_units end ) as tpfl\n",
      "from   trans_c24037\n",
      "where carbon_data_year > to_date(\"2020\")\n",
      "group by industryname\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|            海伟石化有限公司|  1|\n",
      "|绍兴市柯桥区怡中染整有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "\n",
      "  select t1.industryname as custname,\n",
      " \tmax(case when rating_result_announcement like '%绿%' then 1 \n",
      "      when rating_result_announcement like '%蓝%' then 2 \n",
      "      when rating_result_announcement like '%蓝%' and flag =1  then 3\n",
      "      when rating_result_announcement like '%黄%' then 4 \n",
      "      when rating_result_announcement like '%黄%' and flag =1  then 5 \n",
      "       when rating_result_announcement like '%红%' then 6 \n",
      "        when rating_result_announcement like '%黑%' then 7 end ) as  hjxypjdj\n",
      "from \n",
      "\n",
      "\n",
      "( \n",
      "  select industryname, \n",
      "case when  rating_result_announcement is null or  rating_result_announcement = '' or rating_result_announcement = ' ' then rating_result_released\n",
      "else rating_result_announcement\n",
      "  end as rating_result_announcement \n",
      "from trans_c24024\n",
      "where enterprise_environmental_rating_year > to_date(\"2020\") \n",
      ")\n",
      "t1 left join \n",
      "(\n",
      "select distinct industryname,1 as flag \n",
      "from trans_c24026 \n",
      " where year > to_date(\"2020\")\n",
      "union\n",
      "select distinct industryname,1 as flag \n",
      "from trans_c24027 \n",
      " where key_energy_consumption_year > to_date(\"2020\")\n",
      "union\n",
      "select distinct industryname,1 as flag \n",
      "from trans_c24038 \n",
      " where year_of_carbon_key_polluting_units  > to_date(\"2020\")\n",
      ") t2\n",
      "on t1.industryname = t2.industryname\n",
      "\n",
      "group by  t1.industryname\n",
      "\n",
      "\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|绍兴柯桥宇盛针纺整理有限公司|  1|\n",
      "|  欣旺达动力科技股份有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct companyname   as custname,1 as hjwrfmyq \n",
      "from fh_stg_news\n",
      "where body rlike '环境|污染'\n",
      "and to_date(replace(sorttime,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|  唐山德伯特机械有限公司|  1|\n",
      "|重庆洪九果品股份有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(yjjpjx) as yjjpjx\n",
      "from \n",
      "(\n",
      "select industryname , \n",
      "case when control_type like '%A%' then 1  \n",
      " when control_type like '%引领性%' then 2 \n",
      " when control_type like '%B-%' then 4\n",
      "  when control_type like '%B%' then 3 \n",
      "    when control_type like '%保障类%' then 5\n",
      "     when control_type like '%C%' then 6\n",
      "      when control_type like '%其他%' then 7\n",
      "       when control_type like '%D%' then 8\n",
      "        when control_type like '%非引领性%' then 9\n",
      "         when control_type like '%长期停产%' then 10 \n",
      "         else null end as yjjpjx\n",
      "from trans_c24031\n",
      "where year_of_enterprise_performance_grading  > to_date(\"2020\")\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "+--------------------------------+---+\n",
      "|                        custname|num|\n",
      "+--------------------------------+---+\n",
      "|美克国际家私（天津）制造有限公司|  1|\n",
      "|        江苏扬建钢构科技有限公司|  1|\n",
      "+--------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct industryname as custname,1 as sfblwzmqdqy\n",
      "from trans_c24034\n",
      "where to_date(replace(control_start_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")  or  to_date(replace(control_end_time,'/','-')) > to_date(\"2024-12-31\")\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|广西来宾东糖凤凰有限公司|  1|\n",
      "|安徽楚江高新电材有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct industryname as custname,1 as sfblwqjscqy\n",
      "from trans_c24028\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|胜宏科技（惠州）股份有限公司|  1|\n",
      "|        阳新弘盛铜业有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(wghdbtcflx) as wghdbtcflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as wghdbtcflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'价格|骗取'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|岱山县海纳数智运营有限公司|  1|\n",
      "|      绵阳云豪集团有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as wghdbtcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and  DataType rlike'价格|骗取'\n",
      "group by industryname \n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|    山西国锦煤电有限公司|  1|\n",
      "|嘉善县幽澜自来水有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as wghdbtcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and  DataType rlike'价格|骗取'\n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|岱山县海纳数智运营有限公司|  1|\n",
      "|淮安曙光国际大酒店有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(tdlycflx) as tdlycflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike  '警告|通报批评' then 2\n",
      "    when penalty_result rlike  '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike  '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike  '行政拘留' then 6 else null end as tdlycflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")  and  DataType rlike'土地管理'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|  安徽建工第二建设集团有限公司|  1|\n",
      "|重庆市垫江卧龙隔热材料有限公司|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as tdlycfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and  DataType rlike'土地管理'\n",
      "group by industryname \n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|甘肃省公路交通建设集团有限公司|  1|\n",
      "|                淮安市淮安医院|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as tdlycfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and  DataType rlike'土地管理'\n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+--------------------------------+---+\n",
      "|                        custname|num|\n",
      "+--------------------------------+---+\n",
      "|            丹徒新区建设发展公司|  1|\n",
      "|河南大有能源股份有限公司新安煤矿|  1|\n",
      "+--------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct industryname as custname,1 as sfwlszzqy\n",
      "from trans_c24035\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|  广西北港新材料有限公司|  1|\n",
      "|河南豫光金铅股份有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select  distinct qentname  as custname,1 as fxlz\n",
      "from ys_rst_zizhi_chinamoneybond \n",
      "where   to_date(replace(sdate,'/','-'))  between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "\n",
      "+--------+---+\n",
      "|custname|num|\n",
      "+--------+---+\n",
      "+--------+---+\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(zyjkcflx) as zyjkcflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as zyjkcflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'职业健康'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+-------------------------------------+---+\n",
      "|                             custname|num|\n",
      "+-------------------------------------+---+\n",
      "|           福建省长鸿建设集团有限公司|  1|\n",
      "|晋能控股装备制造集团华昱能源化工山...|  1|\n",
      "+-------------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as zyjkcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'职业健康'\n",
      "group by industryname \n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|阳泉市上社二景煤炭有限责任公司|  1|\n",
      "|      周口通材物资供应有限公司|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as zyjkcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and  DataType rlike'职业健康'\n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|深圳市欣隆华科技有限公司|  1|\n",
      "|昆明华创机械制造有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(lgcflx) as lgcflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as lgcflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and  trim(DataType) like'劳工保障'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|    武汉聚亚美新材料有限公司|  1|\n",
      "|上海绿地建设（集团）有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as lgcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and  trim(DataType) like'劳工保障'\n",
      "group by industryname \n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|    武汉聚亚美新材料有限公司|  1|\n",
      "|新疆城建（集团）股份有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as lgcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and trim(DataType) like'劳工保障'\n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|中国机械工业建设集团有限公司|  1|\n",
      "|    中建二局安装工程有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select companyname  as custname,count(distinct companyname,caseno) as sdldjfbscs\n",
      "from \n",
      "fh_stg_ktgg\n",
      "where to_date(replace(sorttime,'/','-'))  between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "and body rlike '劳动合同纠纷|劳动争议'\n",
      "group by companyname\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|        中天钢铁集团有限公司|  1|\n",
      "|徐州中央百货大楼股份有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select qentname  as custname,\n",
      "sum(case when gender = '女' then 1 else 0 end)/count(gender)  as nxggzb\n",
      "from (select distinct * from ys_rst_zizhi_stockmanagers) T\n",
      "where gender is not null and \n",
      "\n",
      "(enddate between    to_date(\"2020-01-23\") and to_date(\"2024-12-31\")    or startdate between to_date(\"2020-01-23\")  and  to_date(\"2024-12-31\") )\n",
      "\n",
      "group by qentname\n",
      "\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|            鸿博股份有限公司|  1|\n",
      "|苏州苏试试验集团股份有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct qentname  as custname,1 as sbsffsqjf\n",
      "from ys_rst_deep_yearreportsocsecs \n",
      "where cast( unpaidsocialinsso110 as decimal(10,2)) > 0 \n",
      "or  cast( unpaidsocialinsso210 as decimal(10,2)) > 0 \n",
      "or  cast( unpaidsocialinsso310 as decimal(10,2)) > 0\n",
      "or  cast( unpaidsocialinsso410 as decimal(10,2)) > 0\n",
      "or  cast( unpaidsocialinsso510 as decimal(10,2)) > 0\n",
      "\n",
      "+----------------------+---+\n",
      "|              custname|num|\n",
      "+----------------------+---+\n",
      "|永康市亚亨工贸有限公司|  1|\n",
      "|  浙江宇辉电子有限公司|  1|\n",
      "+----------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select \n",
      "industryname   as custname,\n",
      "case when  cast(number_of_fatalities as decimal(10,2)) > 30 or cast(number_of_serious_injuries as decimal(10,2)) > 100 or  cast(economic_loss as decimal(10,2)) > 10000 then 4 \n",
      "when cast(number_of_fatalities as decimal(10,2)) between 10 and 30 or  cast(number_of_serious_injuries as decimal(10,2)) between  50 and 100 or   cast(economic_loss as decimal(10,2)) between 5000 and 10000 then 3 \n",
      "when cast(number_of_fatalities as decimal(10,2)) between 3 and 10 or  cast(number_of_serious_injuries as decimal(10,2)) between  10 and 50 or   cast(economic_loss as decimal(10,2)) between 1000 and 5000 then 2 \n",
      "else 1 end as fsaqscsgyzcd\n",
      "from trans_c24041\n",
      "where to_date(replace(accident_time,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")\n",
      "\n",
      "+--------+---+\n",
      "|custname|num|\n",
      "+--------+---+\n",
      "+--------+---+\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(aqjglx) as aqjglx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as aqjglx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and basis_for_penalty rlike'安全生产|安全管理|安全监督'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|长沙楚元中西医结合医院有限公司|  1|\n",
      "|  兰州国资利民物业管理有限公司|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as aqjgcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and basis_for_penalty rlike'安全生产|安全管理|安全监督'\n",
      "group by industryname \n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|武汉汉石石油运输有限公司|  1|\n",
      "|江苏南通二建集团有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as aqjgje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and basis_for_penalty rlike'安全生产|安全管理|安全监督' \n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|        长业建设集团有限公司|  1|\n",
      "|兰州国资利民物业管理有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(xfaqcflx) as xfaqcflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as xfaqcflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'消防' \n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|永康市万佳晓工贸有限公司|  1|\n",
      "|    温州海泰鞋业有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as xfaqcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'消防' \n",
      "group by industryname \n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|云南省建设投资控股集团有限公司|  1|\n",
      "|      聊城信迪科技开发有限公司|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as xfaqcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and DataType rlike'消防'  \n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|宁夏德昊科技产业有限公司|  1|\n",
      "|芜湖国风塑胶科技有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(cpzlcflx) as cpzlcflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as cpzlcflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and trim(DataType) like'产品质量' \n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|芜湖市绿丰建设集团有限公司|  1|\n",
      "|中国建筑第二工程局有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as cpzlcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and trim(DataType) like'产品质量'  \n",
      "group by industryname \n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|屏边福民农产品有限责任公司|  1|\n",
      "|建材广州工程勘测院有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as cpzlcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and trim(DataType) like'产品质量' \n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|      中南勘察基础工程有限公司|  1|\n",
      "|深圳市宝鹰建设集团股份有限公司|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select qentname  as custname,count(distinct qentname,anndate) as yyzzqsl\n",
      "from \n",
      "(\n",
      "select qentname,anndate\n",
      "from  ys_rst_zizhi_products\n",
      "union \n",
      "select qentname,anndate\n",
      "from ys_rst_zizhi_softwares\n",
      ") t \n",
      "where anndate between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "group by qentname\n",
      "\n",
      "+------------------------------------+---+\n",
      "|                            custname|num|\n",
      "+------------------------------------+---+\n",
      "|    母婴友好（武汉）健康服务有限公司|  1|\n",
      "|郑州建中建设开发（集团）有限责任公司|  1|\n",
      "+------------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select  distinct companyname  as custname,1 as jscxlxqy \n",
      "from fh_stg_news\n",
      "where body rlike '科技中小型企业|专精特新企业科改示范型企业|高新技术企业'\n",
      "and to_date(replace(sorttime,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|洛阳香江万基铝业有限公司|  1|\n",
      "|世纪恒通科技股份有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct companyname  as custname,1 as cycsjzdgyhd\n",
      "from fh_stg_news\n",
      "where body rlike '慈善|公益|捐款|捐赠|扶贫'\n",
      "and to_date(replace(sorttime,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|        四川鑫电电缆有限公司|  1|\n",
      "|北京万界数据科技有限责任公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select companyname  as custname,count(distinct companyname,caseno) as mjjdbscs\n",
      "from \n",
      " fh_stg_ktgg\n",
      " t\n",
      "where  to_date(replace(sorttime,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "and body rlike '民间借贷'\n",
      "group by companyname \n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|十一冶建设集团有限责任公司|  1|\n",
      "|江苏塔伦特汽车部件有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select qentname  as custname,count(qentname,stkpawndate) as czgqcs\n",
      "from ys_rst_deep_stockpawns\n",
      "where  to_date(replace(stkpawndate,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "group by qentname \n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|      山东史泰丰肥业有限公司|  1|\n",
      "|上海宇熠电气设备制造有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select qentname  as custname,count(distinct qentname,frofrom,froto) as gqdjzrcs\n",
      "from ys_rst_deep_judicialaiddetails\n",
      "where   (to_date(replace(frofrom,'/','-')) between    to_date(\"2020-01-23\") and to_date(\"2024-12-31\")    or to_date(replace( froto,'/','-')) between to_date(\"2020-01-23\")  and  to_date(\"2024-12-31\") )\n",
      "group by qentname \n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|江门市恒泰电子智能科技有限公司|  1|\n",
      "|      合肥品能光伏科技有限公司|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select \n",
      "companyname   as custname,\n",
      "min(case when eventresult like '%A%' then 1\n",
      "when eventresult like '%B%' then 2\n",
      "when eventresult like '%C%' then 3 \n",
      "when eventresult like '%D%' then 4\n",
      "when eventresult like '%M%' then 5 \n",
      "else null end ) as nsxydj\n",
      "\n",
      "from  fh_stg_satparty_xin\n",
      "where to_date(replace(sorttime,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "group by companyname\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|中铁物资集团中南有限公司|  1|\n",
      "|重庆邦博装饰工程有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(swcflx) as swcflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as swcflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'税务'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|厦门市铁晟进出口有限公司|  1|\n",
      "|    山河建设集团有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as swcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'税务'\n",
      "group by industryname \n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|  浙江宏达化学制品有限公司|  1|\n",
      "|苏州吉登自动化科技有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname  as custname,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as swcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      " and DataType rlike'税务'\n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|厦门市铁晟进出口有限公司|  1|\n",
      "|浙江宏达化学制品有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select companyname  as custname,1 as swfzch\n",
      "from fh_stg_satparty_fzc \n",
      "where to_date(replace(posttime,'/','-'))  between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|深圳前海联动云汽车租赁有限公司|  6|\n",
      "|      武汉锦康餐饮管理有限公司|  4|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct qentname  as custname,1 as sfqs\n",
      "from ys_rst_sifainfo_qsggents\n",
      "where  to_date(replace( pubtime,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|  中能祥瑞电力工程有限公司|  1|\n",
      "|苏州得一鲜食品科技有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(syhljbzdjzcflx) as syhljbzdjzcflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as syhljbzdjzcflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'贿赂|不正当|广告违规'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|温州佳盛乳胶制品有限公司|  1|\n",
      "|长兴广电报业广告有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as syhljbzdjzcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and DataType rlike'贿赂|不正当|广告违规' \n",
      "group by industryname \n",
      "\n",
      "+------------------------------+---+\n",
      "|                      custname|num|\n",
      "+------------------------------+---+\n",
      "|      北京秋实农业股份有限公司|  1|\n",
      "|济南博爱中西医结合医院有限公司|  1|\n",
      "+------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as syhljbzdjzcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and DataType rlike'贿赂|不正当|广告违规' \n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|  温州佳盛乳胶制品有限公司|  1|\n",
      "|深圳市宁远科技股份有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(ldcflx) as ldcflx\n",
      "from \n",
      "(select industryname , \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as ldcflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and  DataType rlike'垄断'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|东北制药集团股份有限公司|  1|\n",
      "+------------------------+---+\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as ldcfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and  DataType rlike'垄断' \n",
      "group by industryname \n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|东北制药集团股份有限公司|  1|\n",
      "+------------------------+---+\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as ldcfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and   DataType rlike'垄断' \n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|东北制药集团股份有限公司|  1|\n",
      "+------------------------+---+\n",
      "\n",
      "--------------------\n",
      "\n",
      "select distinct qentname  as custname,count(distinct qentname,indate)\n",
      "from ys_rst_deep_exceptions\n",
      "where  to_date(replace( indate,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "group by qentname  \n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|    杭州讯杰通信设备有限公司|  1|\n",
      "|北京英印威尔物业管理有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select qentname   as custname,\n",
      "max(case when outreason > 0 then outreason else inreason end) as jyyclx\n",
      "from \n",
      "\n",
      "(\n",
      "select qentname ,\n",
      "case \n",
      " when inreason rlike '信息隐瞒|弄虚作假' then  3\n",
      "when inreason rlike '未依照.*公示|未按照.*公示' then 2 \n",
      "when inreason rlike '经营场所无法联系的' then 1 \n",
      "else null end as inreason,\n",
      "case when outreason rlike '严重违法' then 4 else null end as outreason\n",
      "from ys_rst_deep_exceptions \n",
      "where  to_date(replace( indate,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      ")\n",
      "t\n",
      "group by qentname\n",
      "\n",
      "+----------------------+---+\n",
      "|              custname|num|\n",
      "+----------------------+---+\n",
      "|深圳市馨意美妆有限公司|  1|\n",
      "|深圳市华创服饰有限公司|  1|\n",
      "+----------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select  distinct qentname  as custname,1 as yzwfsx \n",
      "from ys_rst_shixin_data\n",
      "where to_date(indate) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\") and (oudate is null or oudate = '' or oudate = ' ' )\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|      中煤地建设工程有限公司|  1|\n",
      "|韩城市城市投资(集团)有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select industryname as custname ,sum( (coalesce(yuan,yuan3,yuan2,0) + coalesce(moushou1,0)) )as wzjycfje\n",
      "from \n",
      "(\n",
      "select industryname,\n",
      "case when  cast(penalty_fine_amount as decimal(10,2))  = 0 then null else penalty_fine_amount * 10000 end as yuan ,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(penalty_result,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan3\n",
      "\n",
      "\n",
      "from \n",
      "(select * from \n",
      "trans_c24042  where to_date(replace(penalty_date,'/','-')) between to_date(\"2020-01-23\") and to_date(\"2024-12-31\")  \n",
      "and  DataType rlike'许可' and DataType not rlike'垄断|贿赂|不正当|广告违规|职业健康|土地管理|价格|骗取|信息披露'\n",
      ") t\n",
      ") T\n",
      "group by industryname\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|宁夏德昊科技产业有限公司|  1|\n",
      "|        海伟石化有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select industryname as custname,count(distinct penalty_document_number,industryname) as wzjycfcs\n",
      "from \n",
      "trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") \n",
      "and  DataType rlike'许可' and DataType not rlike'垄断|贿赂|不正当|广告违规|职业健康|土地管理|价格|骗取|信息披露'\n",
      "group by industryname \n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|  十一冶建设集团有限责任公司|  1|\n",
      "|泗阳城南新城实业投资有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select industryname as custname,max(wzjycflx) as wzjycflx\n",
      "from \n",
      "(select industryname, \n",
      "case when penalty_result rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when penalty_result rlike '警告|通报批评' then 2\n",
      "    when penalty_result rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when penalty_result rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when penalty_result rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when penalty_result rlike '行政拘留' then 6 else null end as wzjycflx\n",
      "from trans_c24042\n",
      "where to_date(replace(release_time,'/','-')) between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\") and  DataType rlike'许可' and DataType not rlike'垄断|贿赂|不正当|广告违规|职业健康|土地管理|价格|骗取|信息披露'\n",
      ") t \n",
      "group by industryname\n",
      "\n",
      "\n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|    上海宝冶集团有限公司|  1|\n",
      "|南通五建控股集团有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select qentname  as custname ,count(distinct mabregdate,qentname)  as dcdycs\n",
      "from ys_rst_deep_mortgagebasics\n",
      "where  to_date(replace( mabregdate,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "group by qentname\n",
      "\n",
      "+--------------------+---+\n",
      "|            custname|num|\n",
      "+--------------------+---+\n",
      "|  新思考电机有限公司|  1|\n",
      "|河北冀春化工有限公司|  1|\n",
      "+--------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select qentname  as custname ,count(distinct sdate,qentname)  as bdcdycs\n",
      "from ys_rst_zizhi_landmort \n",
      "where sdate between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "group by qentname \n",
      "\n",
      "+--------+---+\n",
      "|custname|num|\n",
      "+--------+---+\n",
      "+--------+---+\n",
      "\n",
      "--------------------\n",
      " \n",
      "select qentname   as custname,count(distinct pefperform,pefperto,qentname)  as dwdbcs\n",
      "from ys_rst_deep_yearreportforguarantees\n",
      "where( pefperform between  to_date(\"2020-01-23\")  and   to_date(\"2024-12-31\") ) or  (pefperto  between to_date(\"2020-01-23\")  and   to_date(\"2024-12-31\") )\n",
      "group by qentname\n",
      "\n",
      "+------------------------------------+---+\n",
      "|                            custname|num|\n",
      "+------------------------------------+---+\n",
      "|                成都铸康实业有限公司|  1|\n",
      "|泉州市丰泽区国有资产投资经营有限公司|  1|\n",
      "+------------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select qentname  as custname,1 as lrsxbzx\n",
      "from \n",
      "(\n",
      "select qentname,regdateclean\n",
      "from\n",
      "ys_rst_deep_punishbreaks \n",
      "union\n",
      "select qentname,fsxlasj as regdateclean\n",
      "from \n",
      "ys_rst_sifainfo_caselesscredits  \n",
      "union\n",
      "select qentname ,regdateclean\n",
      "from \n",
      "ys_rst_deep_punisheds   \n",
      ")\n",
      "\n",
      "where  to_date(replace( regdateclean,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "\n",
      "+--------------------------------+---+\n",
      "|                        custname|num|\n",
      "+--------------------------------+---+\n",
      "|            山河建设集团有限公司|449|\n",
      "|江苏中南建筑产业集团有限责任公司|295|\n",
      "+--------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select\n",
      "companyname  as custname,\n",
      "sum(coalesce(yuan,yuan1,wan,wan2,wan3,yuan2,yuan3,yuan4) + coalesce(moushou1,0)) as xycfje\n",
      "\n",
      "from\n",
      "\n",
      "(\n",
      "select companyname,old,eventresult,\n",
      "cast(REGEXP_EXTRACT(eventresult,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan,\n",
      "cast(REGEXP_EXTRACT(eventresult,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)$',1) as decimal(10,2)) as yuan1,\n",
      "cast(REGEXP_EXTRACT(eventresult,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan,\n",
      "cast(REGEXP_EXTRACT(eventresult,'^([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan2,\n",
      "cast(REGEXP_EXTRACT(eventresult,'(?:处|处以)\\D{0,3}([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan3,\n",
      "cast(REGEXP_EXTRACT(eventresult,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(eventresult,'（￥?([0-9\\.]+)元?）',1) as decimal(10,2)) as yuan3,\n",
      "cast(REGEXP_EXTRACT(eventresult,'(^[0-9\\.]+$)',1) as decimal(10,2)) as yuan4,\n",
      "cast(REGEXP_EXTRACT(eventresult,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1\n",
      "\n",
      "from \n",
      "(\n",
      "select companyname,sorttime,eventresult as old ,\n",
      "REPLACE(REPLACE(\n",
      "REPLACE(\n",
      "        REPLACE(\n",
      "                REPLACE(\n",
      "                        REPLACE(\n",
      "                                REPLACE(\n",
      "                                        REPLACE(\n",
      "                                                REPLACE(\n",
      "                                                        REPLACE(\n",
      "                                                                REPLACE(\n",
      "                                                                        REPLACE(\n",
      "                                                                                REPLACE(\n",
      "                                                                                        REPLACE(\n",
      "                                                                                                REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(eventresult,':',''),'壹','1'),'贰',2),'叁',3),'肆',4),'伍',5)\n",
      "                                                                                        ,'陆',6)\n",
      "                                                                                ,'柒',7)\n",
      "                                                                        ,'捌',8)\n",
      "                                                                ,'玖',9)\n",
      "                                                        ,'五',5)\n",
      "                                                ,'一',1)\n",
      "                                        ,'二',2)\n",
      "                                ,'三',3)\n",
      "                        ,'四',4)\n",
      "                ,'六',6)\n",
      "        ,'七',7)\n",
      ",'八',8)\n",
      ",'九',9),'玫',9) as eventresult\n",
      "from \n",
      "(select companyname,eventresult,sorttime from \n",
      "fh_stg_credit_chufa where eventresult <> '' and eventresult is not null and to_date(replace( sorttime,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")) t\n",
      ") T\n",
      ") T2\n",
      "group by companyname\n",
      "\n",
      "+--------------------------------+---+\n",
      "|                        custname|num|\n",
      "+--------------------------------+---+\n",
      "|重庆北渡园区建设开发股份有限公司|  1|\n",
      "|        南京久大建设集团有限公司|  1|\n",
      "+--------------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select companyname  as custname ,count(distinct yjcode,companyname) as xycfcs\n",
      "from fh_stg_credit_chufa\n",
      "where to_date(replace( sorttime,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "group by companyname\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|    江苏万象建工集团有限公司|  1|\n",
      "|绍兴柯桥宇盛针纺整理有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select companyname  as custname,max(xycflx) as xycflx\n",
      "from \n",
      "(select companyname , \n",
      "case when eventtype rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when eventtype rlike '警告|通报批评' then 2\n",
      "    when eventtype rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when eventtype rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when eventtype rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when eventtype rlike '行政拘留' then 6 else null end as xycflx\n",
      "from fh_stg_credit_chufa\n",
      "where to_date(replace( sorttime,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      ") t \n",
      "group by companyname\n",
      "\n",
      "+--------------------------+---+\n",
      "|                  custname|num|\n",
      "+--------------------------+---+\n",
      "|中铁北京工程局集团有限公司|  1|\n",
      "|山东鑫大地控股集团有限公司|  1|\n",
      "+--------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select\n",
      "qentname as custname,\n",
      "sum(coalesce(yuan,yuan1,wan,wan2,wan3,yuan2,yuan3,yuan4) + coalesce(moushou1,0)) as qtxzcfje\n",
      "\n",
      "from\n",
      "\n",
      "(\n",
      "select qentname,old,pencontent,\n",
      "cast(REGEXP_EXTRACT(pencontent,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan,\n",
      "cast(REGEXP_EXTRACT(pencontent,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)$',1) as decimal(10,2)) as yuan1,\n",
      "cast(REGEXP_EXTRACT(pencontent,'(?:罚|罚款|人民币)\\D{0,3}([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan,\n",
      "cast(REGEXP_EXTRACT(pencontent,'^([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan2,\n",
      "cast(REGEXP_EXTRACT(pencontent,'(?:处|处以)\\D{0,3}([0-9\\.]+)万元',1) as decimal(10,2)) * 10000 as wan3,\n",
      "cast(REGEXP_EXTRACT(pencontent,'(?:处|处以)\\D{0,3}([0-9\\.]+)元',1) as decimal(10,2)) as yuan2,\n",
      "cast(REGEXP_EXTRACT(pencontent,'（￥?([0-9\\.]+)元?）',1) as decimal(10,2)) as yuan3,\n",
      "cast(REGEXP_EXTRACT(pencontent,'(^[0-9\\.]+$)',1) as decimal(10,2)) as yuan4,\n",
      "cast(REGEXP_EXTRACT(pencontent,'没收\\D{0,30}([0-9\\.]+)元',1) as decimal(10,2)) as moushou1\n",
      "\n",
      "from \n",
      "(\n",
      "select qentname,pendecissdate,pencontent as old ,\n",
      "REPLACE(REPLACE(\n",
      "REPLACE(\n",
      "        REPLACE(\n",
      "                REPLACE(\n",
      "                        REPLACE(\n",
      "                                REPLACE(\n",
      "                                        REPLACE(\n",
      "                                                REPLACE(\n",
      "                                                        REPLACE(\n",
      "                                                                REPLACE(\n",
      "                                                                        REPLACE(\n",
      "                                                                                REPLACE(\n",
      "                                                                                        REPLACE(\n",
      "                                                                                                REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(REPLACE(pencontent,':',''),'壹','1'),'贰',2),'叁',3),'肆',4),'伍',5)\n",
      "                                                                                        ,'陆',6)\n",
      "                                                                                ,'柒',7)\n",
      "                                                                        ,'捌',8)\n",
      "                                                                ,'玖',9)\n",
      "                                                        ,'五',5)\n",
      "                                                ,'一',1)\n",
      "                                        ,'二',2)\n",
      "                                ,'三',3)\n",
      "                        ,'四',4)\n",
      "                ,'六',6)\n",
      "        ,'七',7)\n",
      ",'八',8)\n",
      ",'九',9),'玫',9) as pencontent\n",
      "from \n",
      "(select qentname,pencontent,pendecissdate from \n",
      "ys_rst_deep_entcasebaseinfos where pencontent <> '' and pencontent is not null and to_date(replace( pendecissdate,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")) t\n",
      ") T\n",
      ") T2\n",
      "group by qentname\n",
      "\n",
      "+----------------------+---+\n",
      "|              custname|num|\n",
      "+----------------------+---+\n",
      "|广州康瑞泰药业有限公司|  1|\n",
      "|  诸暨菲达酒楼有限公司|  1|\n",
      "+----------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      " \n",
      "select qentname  as custname,count(distinct pendecno,qentname) as qtxzcfcs\n",
      "from \n",
      "ys_rst_deep_entcasebaseinfos\n",
      "where  to_date(replace( pendecissdate,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "group by qentname \n",
      " \n",
      "+------------------------+---+\n",
      "|                custname|num|\n",
      "+------------------------+---+\n",
      "|明峰医疗系统股份有限公司|  1|\n",
      "|    宜良安兴建材有限公司|  1|\n",
      "+------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "\n",
      "select qentname  as custname,max(qtxzcflx) as qtxzcflx\n",
      "from \n",
      "(select qentname , \n",
      "case when pencontent rlike '法律规定的其他行政处罚|行政法规规定其他行政处罚|法律[.]行政法规规定其他行政处罚' then 1\n",
      "    when pencontent rlike '警告|通报批评' then 2\n",
      "    when pencontent rlike '罚款|没收违法所得|没收非法财物' then 3\n",
      "    when pencontent rlike '暂扣许可证件|降低资质等级|吊销许可证件' then 4 \n",
      "    when pencontent rlike '限制开展生产经营活动|责令停产停业|责令关闭|限制从业' then 5 \n",
      "    when pencontent rlike '行政拘留' then 6 else null end as qtxzcflx\n",
      "from ys_rst_deep_entcasebaseinfos\n",
      "where to_date(replace( pendecissdate,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      ") t \n",
      "group by qentname\n",
      "\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|        山西高义钢铁有限公司|  1|\n",
      "|重庆空港经济开发建设有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------\n",
      "\n",
      "select companyname  as custname,count(distinct body,caseno) as qtajbscs\n",
      "from \n",
      "\n",
      "\n",
      " fh_stg_ktgg\n",
      "\n",
      "where   to_date(replace( sorttime,'/','-'))   between to_date(\"2020-01-23\")  and to_date(\"2024-12-31\")\n",
      "and body not rlike '劳动合同纠纷|劳动争议|民间借贷'\n",
      "group by companyname\n",
      "\n",
      "+----------------------------+---+\n",
      "|                    custname|num|\n",
      "+----------------------------+---+\n",
      "|中国航空技术国际工程有限公司|  1|\n",
      "|上海凯泉泵业（集团）有限公司|  1|\n",
      "+----------------------------+---+\n",
      "only showing top 2 rows\n",
      "\n",
      "--------------------"
     ]
    }
   ],
   "source": [
    "import pyspark.sql.functions as F\n",
    "\n",
    "# For each SQL snippet in `lst` (defined in an earlier cell), run it on the\n",
    "# Spark session `s`, then count rows per custname and show the two custnames\n",
    "# with the most rows, followed by a separator so outputs are easy to scan.\n",
    "for sql in lst:\n",
    "    df = s.sql(sql)\n",
    "    # groupBy/agg/orderBy: rows per custname, descending, so .show(2) = top 2.\n",
    "    df2 = df.groupBy(F.col('custname')).agg(F.count(\"custname\").alias('num')).orderBy(F.col('num').desc())\n",
    "    print(sql)\n",
    "    df2.show(2)\n",
    "    print('-'*20)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 46,
   "id": "831b0076-d375-495a-8285-4d41415de032",
   "metadata": {
    "tags": []
   },
   "outputs": [
    {
     "data": {
      "application/vnd.jupyter.widget-view+json": {
       "model_id": "befe5569207d401ca675b951d8c2b669",
       "version_major": 2,
       "version_minor": 0
      },
      "text/html": [
       "<p>Failed to display Jupyter Widget of type <code>FloatProgress</code>.</p>\n",
       "<p>\n",
       "  If you're reading this message in the Jupyter Notebook or JupyterLab Notebook, it may mean\n",
       "  that the widgets JavaScript is still loading. If this message persists, it\n",
       "  likely means that the widgets JavaScript library is either not installed or\n",
       "  not enabled. See the <a href=\"https://ipywidgets.readthedocs.io/en/stable/user_install.html\">Jupyter\n",
       "  Widgets Documentation</a> for setup instructions.\n",
       "</p>\n",
       "<p>\n",
       "  If you're reading this message in another frontend (for example, a static\n",
       "  rendering on GitHub or <a href=\"https://nbviewer.jupyter.org/\">NBViewer</a>),\n",
       "  it may mean that your frontend doesn't currently support widgets.\n",
       "</p>\n"
      ],
      "text/plain": [
       "FloatProgress(value=0.0, bar_style='info', description='Progress:', layout=Layout(height='25px', width='50%'), max=1.0)"
      ]
     },
     "metadata": {},
     "output_type": "display_data"
    },
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "DataFrame[custname: string, hjcfzyzlx: int, hjcfcs: bigint, hjcfje: double, sfblwxtcqy: int, pwxqzgcs: bigint, pwxkzsfzyxqn: int, hjfxdj: int, tpfl: double, hjxypjdj: int, hjwrfmyq: int, yjjpjx: int, sfblwzmqdqy: int, sfblwqjscqy: int, wghdbtcflx: int, wghdbtcfcs: bigint, wghdbtcfje: double, tdlycflx: int, tdlycfcs: bigint, tdlycfje: double, sfwlszzqy: int, fxlz: int, zyjkcflx: int, zyjkcfcs: bigint, zyjkcfje: double, lgcflx: int, lgcfcs: bigint, lgcfje: double, sdldjfbscs: bigint, nxggzb: double, sbsffsqjf: int, fsaqscsgyzcd: int, aqjglx: int, aqjgcs: bigint, aqjgje: double, xfaqcflx: int, xfaqcfcs: bigint, xfaqcfje: double, cpzlcflx: int, cpzlcfcs: bigint, cpzlcfje: double, yyzzqsl: bigint, jscxlxqy: int, cycsjzdgyhd: int, mjjdbscs: bigint, czgqcs: bigint, gqdjzrcs: bigint, nsxydj: int, swcflx: int, swcfcs: bigint, swcfje: double, swfzch: int, sfqs: int, syhljbzdjzcflx: int, syhljbzdjzcfcs: bigint, syhljbzdjzcfje: double, ldcflx: int, ldcfcs: bigint, ldcfje: double, count(DISTINCT qentname, indate): bigint, jyyclx: int, yzwfsx: int, wzjycfje: double, wzjycfcs: bigint, wzjycflx: int, dcdycs: bigint, bdcdycs: bigint, dwdbcs: bigint, lrsxbzx: int, xycfje: decimal(27,2), xycfcs: bigint, xycflx: int, qtxzcfje: decimal(27,2), qtxzcfcs: bigint, qtxzcflx: int, qtajbscs: bigint]"
     ]
    }
   ],
   "source": [
    "# NOTE(review): `df` here is leaked loop state from the previous cell (it was\n",
    "# reassigned on every iteration), yet this cell's execution count (46 vs 1) and\n",
    "# the wide schema shown in its output suggest `df` actually came from a cell\n",
    "# that is no longer above this one — confirm which DataFrame is intended before\n",
    "# relying on a fresh Restart-and-Run-All.\n",
    "# Per the Spark docs, persist() only marks the frame for caching; it is\n",
    "# materialized on the next action.\n",
    "df.persist()"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.9.13"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
