/**
 * SPDX-FileCopyrightText: 2023-2025 Sangfor Technologies Inc.
 * SPDX-License-Identifier: Mulan PSL v2
 */
const data = `2022-11-03 09:30:38.340 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 09:30:38.373 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 15872 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 09:30:38.374 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 09:30:41.967 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 09:30:41.996 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$654c743a] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 09:30:43.076 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 09:30:43.117 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 09:30:43.209 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 09:30:43.214 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 09:30:43.214 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 09:30:43.214 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 09:30:45.188 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 09:30:45.189 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 09:30:45.344 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 09:30:45.345 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 09:30:45.345 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 09:30:45.345 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 09:30:45.345 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 09:30:47.810 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 09:30:49.786 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15632ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 09:30:55.346 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 09:30:56.118 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 09:30:56.159 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 09:30:56.199 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 09:30:57.214 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 09:30:57.215 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 09:30:57.215 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 09:30:57.216 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 09:30:57.216 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 09:30:57.216 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 09:30:57.216 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 09:30:57.300 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 09:30:57.301 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 09:31:00.549 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 09:31:02.906 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 26.366 seconds (JVM running for 28.752)
2022-11-03 09:32:33.562 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 09:32:35.468 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 09:32:36.933 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 09:32:37.003 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 09:32:37.336 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 09:32:37.688 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 09:32:40.198 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221102.1, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":5,"--code_type":"sql","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 09:32:41.574 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 09:32:41.577 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 09:32:41.585 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 09:32:43.617 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 09:32:45.973 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221102.1, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":5,"--code_type":"sql","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 09:32:51.085 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 09:33:08.305 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 09:33:08.307 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 09:33:08.318 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 09:33:08.334 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 09:33:08.340 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T09:33:08.321(LocalDateTime), 2(Long)
2022-11-03 09:33:08.353 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 09:33:09.942 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 09:33:09.945 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T09:33:09.930(LocalDateTime), 2(Long)
2022-11-03 09:33:09.952 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 09:33:12.613 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 09:33:23.203 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-195398,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221102.1,serviceAccountName:spark
2022-11-03 09:33:34.759 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker 5, --code_type sql, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 09:33:53.247 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-195398,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221102.1,KFJ_JOB_ID:2,KFJ_TASK_NODE_SELECTOR:null,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-195398,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_TASK_VOLUME_MOUNT:null,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-195398
2022-11-03 09:34:21.222 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T01:33:52Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-195398"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NODE_SELECTOR"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_VOLUME_MOUNT"}={.={}, f:name={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, 
f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T01:33:52Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-195398
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10016078
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-195398
        uid: 1c966e68-8fa9-47d7-a234-363b37cc291f
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker 5, --code_type sql, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-195398
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221102.1
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NODE_SELECTOR
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-195398
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_VOLUME_MOUNT
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-195398
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221102.1
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-195398
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 09:34:30.739 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 09:34:30.740 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 09:34:30.748 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 09:34:30.753 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 09:34:30.756 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T09:34:30.750(LocalDateTime), 2(Long)
2022-11-03 09:34:30.771 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 09:34:34.533 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 09:34:38.260 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 09:34:38.372 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 09:34:38.398 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 09:34:38.399 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 09:40:27.167 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 09:40:27.194 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 23736 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 09:40:27.195 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 09:40:30.765 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 09:40:30.788 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$570cdabd] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 09:40:31.846 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 09:40:31.901 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 09:40:32.004 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 09:40:32.007 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 09:40:32.008 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 09:40:32.008 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 09:40:33.751 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 09:40:33.751 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 09:40:33.906 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 09:40:33.907 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 09:40:33.907 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 09:40:33.907 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 09:40:33.907 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 09:40:36.253 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 09:40:38.211 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15561ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 09:40:43.830 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 09:40:44.537 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 09:40:44.573 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 09:40:44.605 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 09:40:45.600 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 09:40:45.600 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 09:40:45.601 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 09:40:45.601 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 09:40:45.601 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 09:40:45.601 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 09:40:45.601 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 09:40:45.685 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 09:40:45.686 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 09:40:49.020 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 09:40:51.294 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 25.974 seconds (JVM running for 28.644)
2022-11-03 09:40:52.340 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 09:41:01.115 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 09:41:03.023 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 09:41:03.095 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 09:41:03.510 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 09:41:03.854 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 09:41:05.123 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.1, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":5,"--code_type":"sql","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 09:41:06.155 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 09:41:06.158 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 09:41:06.166 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 09:41:07.660 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 09:41:09.964 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.1, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":5,"--code_type":"sql","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 09:41:13.999 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 09:41:17.138 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 09:41:17.140 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 09:41:17.150 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 09:41:17.165 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 09:41:17.170 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T09:41:17.151(LocalDateTime), 2(Long)
2022-11-03 09:41:17.181 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 09:41:18.503 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 09:41:18.506 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T09:41:18.491(LocalDateTime), 2(Long)
2022-11-03 09:41:18.514 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 09:41:20.664 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 09:41:35.907 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-683028,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.1,serviceAccountName:spark
2022-11-03 09:41:51.832 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker 5, --code_type sql, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 09:42:08.643 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-683028,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.1,KFJ_JOB_ID:2,KFJ_TASK_NODE_SELECTOR:null,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-683028,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_TASK_VOLUME_MOUNT:null,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-683028
2022-11-03 09:42:24.528 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T01:41:55Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-683028"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NODE_SELECTOR"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_VOLUME_MOUNT"}={.={}, f:name={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, 
f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T01:41:54Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-683028
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10019734
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-683028
        uid: b6c48d6e-700b-41b0-aa36-05aa8f05cc85
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker 5, --code_type sql, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-683028
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.1
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NODE_SELECTOR
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-683028
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_VOLUME_MOUNT
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-683028
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.1
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-683028
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 09:42:26.227 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 09:42:26.228 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 09:42:26.234 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 09:42:26.236 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 09:42:26.238 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T09:42:26.235(LocalDateTime), 2(Long)
2022-11-03 09:42:26.248 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 09:42:26.260 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 09:42:26.343 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 09:42:26.561 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 09:42:26.588 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 09:42:26.588 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 10:08:04.592 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 10:08:04.619 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 23860 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 10:08:04.620 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 10:08:08.353 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:08:08.376 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$236b1d72] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:08:09.409 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:08:09.461 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:08:09.550 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 10:08:09.552 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 10:08:09.553 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 10:08:09.553 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 10:08:11.241 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:08:11.242 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:08:11.386 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 10:08:11.387 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 10:08:11.387 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 10:08:11.387 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:08:11.388 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:08:13.571 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:08:15.343 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15306ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 10:08:20.506 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:08:21.149 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 10:08:21.183 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 10:08:21.213 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 10:08:22.189 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 10:08:22.189 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 10:08:22.190 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 10:08:22.190 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 10:08:22.190 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 10:08:22.190 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 10:08:22.190 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 10:08:22.261 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 10:08:22.261 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 10:08:25.192 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 10:08:27.437 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 24.89 seconds (JVM running for 27.4)
2022-11-03 10:08:28.414 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 10:08:48.162 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 10:08:50.005 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 10:08:50.077 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 10:08:50.449 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 10:08:50.723 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 0
2022-11-03 10:08:51.733 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: []
2022-11-03 10:08:52.793 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 10:08:52.795 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 10:08:52.806 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 10:08:54.109 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:1,jobRunLimit:5
2022-11-03 10:08:56.148 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: []
2022-11-03 10:09:02.095 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 10:09:02.142 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 10:09:02.307 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 10:09:02.339 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 10:09:02.342 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 10:10:34.801 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 10:10:34.831 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 23308 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 10:10:34.832 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 10:10:38.482 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:10:38.504 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$78d5ba35] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:10:39.539 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:10:39.582 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:10:39.678 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 10:10:39.681 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 10:10:39.682 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 10:10:39.682 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 10:10:41.475 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:10:41.476 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:10:41.576 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 10:10:41.576 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 10:10:41.577 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 10:10:41.577 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:10:41.577 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:10:44.050 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:10:46.592 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @17159ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 10:10:52.145 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:10:52.886 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 10:10:52.924 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 10:10:52.961 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 10:10:54.068 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 10:10:54.068 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 10:10:54.068 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 10:10:54.069 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 10:10:54.069 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 10:10:54.069 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 10:10:54.069 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 10:10:54.145 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 10:10:54.146 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 10:10:57.100 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 10:10:59.408 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 26.787 seconds (JVM running for 29.975)
2022-11-03 10:11:00.271 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 10:11:06.389 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 10:11:06.706 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 10:11:06.715 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 10:11:06.751 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 10:11:06.804 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 10:11:06.813 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":5,"--code_type":"sql","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:11:06.907 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 10:11:06.910 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 10:11:06.915 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 10:11:06.915 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 10:11:09.854 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":5,"--code_type":"sql","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:11:11.813 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 10:11:13.608 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:11:13.609 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:11:13.613 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:11:13.618 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:11:13.619 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:11:13.613(LocalDateTime), 2(Long)
2022-11-03 10:11:13.629 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:11:41.482 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 10:11:41.485 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T10:11:41.469(LocalDateTime), 2(Long)
2022-11-03 10:11:41.491 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 10:11:44.192 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 10:11:54.576 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-509342,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 10:12:07.576 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker 5, --code_type sql, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 10:12:07.577 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-509342,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-509342,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-509342
2022-11-03 10:12:18.362 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T02:11:45Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-509342"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T02:11:45Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-509342
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10033256
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-509342
        uid: a156921a-357f-4618-8883-b82da363a811
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker 5, --code_type sql, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-509342
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-509342
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-509342
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-509342
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 10:12:22.685 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:12:22.687 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:12:22.695 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:12:22.701 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:12:22.704 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:12:22.697(LocalDateTime), 2(Long)
2022-11-03 10:12:22.713 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:12:25.743 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 10:12:25.776 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 10:12:25.801 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 10:12:25.834 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 10:12:25.838 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 10:20:56.437 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 10:20:56.472 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 13796 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 10:20:56.473 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 10:20:59.738 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:20:59.771 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$654c743a] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:21:00.856 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:21:00.894 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:21:00.990 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 10:21:00.994 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 10:21:00.994 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 10:21:00.994 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 10:21:02.783 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:21:02.784 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:21:02.922 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 10:21:02.922 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 10:21:02.923 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 10:21:02.923 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:21:02.923 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:21:05.428 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:21:07.404 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15733ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 10:21:13.005 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:21:13.682 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 10:21:13.714 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 10:21:13.742 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 10:21:14.694 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 10:21:14.695 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 10:21:14.695 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 10:21:14.695 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 10:21:14.695 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 10:21:14.695 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 10:21:14.695 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 10:21:14.767 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 10:21:14.768 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 10:21:19.021 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 10:21:21.517 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 27.018 seconds (JVM running for 29.847)
2022-11-03 10:21:22.377 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 10:21:27.917 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 10:21:28.406 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 10:21:28.419 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 10:21:28.473 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 10:21:28.534 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 0
2022-11-03 10:21:28.541 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: []
2022-11-03 10:21:28.820 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 10:21:28.822 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 10:21:28.830 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 10:21:28.830 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:1,jobRunLimit:5
2022-11-03 10:21:31.016 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: []
2022-11-03 10:21:31.692 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 10:21:31.739 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 10:21:31.752 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 10:21:31.885 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 10:21:31.886 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 10:22:07.578 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 10:22:07.601 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 18884 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 10:22:07.602 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 10:22:10.902 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:22:10.932 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$bf9faa14] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:22:11.897 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:22:11.937 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:22:12.026 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 10:22:12.029 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 10:22:12.030 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 10:22:12.030 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 10:22:13.562 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:22:13.562 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:22:13.668 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 10:22:13.668 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 10:22:13.669 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 10:22:13.669 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:22:13.669 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:22:15.861 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:22:17.718 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15030ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 10:22:23.251 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:22:23.944 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 10:22:23.978 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 10:22:24.009 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 10:22:24.933 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 10:22:24.933 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 10:22:24.934 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 10:22:24.934 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 10:22:24.934 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 10:22:24.934 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 10:22:24.934 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 10:22:25.012 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 10:22:25.013 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 10:22:28.042 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 10:22:30.243 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 24.6 seconds (JVM running for 27.553)
2022-11-03 10:22:31.032 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 10:22:36.285 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 10:22:37.615 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 10:22:37.678 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 10:22:37.997 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 10:22:38.307 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 10:22:39.226 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":5,"--code_type":"java","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:22:40.372 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 10:22:40.375 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 10:22:40.382 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 10:22:42.256 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 10:22:45.666 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":5,"--code_type":"java","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:22:46.978 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 10:22:47.839 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:22:47.839 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:22:47.842 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:22:47.848 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:22:47.849 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:22:47.843(LocalDateTime), 2(Long)
2022-11-03 10:22:47.860 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:22:49.125 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 10:22:49.127 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T10:22:49.121(LocalDateTime), 2(Long)
2022-11-03 10:22:49.132 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 10:22:49.132 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 10:22:52.881 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-170179,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 10:22:53.805 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker 5, --code_type java, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 10:22:53.806 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-170179,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-170179,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-170179
2022-11-03 10:23:00.371 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T02:22:32Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-170179"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T02:22:32Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-170179
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10038138
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-170179
        uid: b7f8b708-1619-46b4-9185-09fcb1eb56e7
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker 5, --code_type java, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-170179
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-170179
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-170179
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-170179
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 10:23:01.937 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:23:01.939 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:23:01.944 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:23:01.945 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:23:01.945 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:23:01.944(LocalDateTime), 2(Long)
2022-11-03 10:23:01.955 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:23:01.965 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 10:23:02.023 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 10:23:02.070 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 10:23:02.126 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 10:23:02.129 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 10:30:38.253 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 10:30:38.287 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 24212 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 10:30:38.288 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 10:30:41.706 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:30:41.743 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$83015aee] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:30:42.735 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:30:42.781 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:30:42.869 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 10:30:42.872 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 10:30:42.872 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 10:30:42.872 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 10:30:44.621 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:30:44.622 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:30:44.779 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 10:30:44.779 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 10:30:44.779 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 10:30:44.779 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:30:44.779 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:30:47.149 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:30:49.208 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15600ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 10:30:54.485 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:30:55.220 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 10:30:55.253 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 10:30:55.284 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 10:30:56.288 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 10:30:56.290 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 10:30:56.290 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 10:30:56.290 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 10:30:56.290 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 10:30:56.291 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 10:30:56.291 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 10:30:56.378 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 10:30:56.378 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 10:31:01.596 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 10:31:04.062 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 27.766 seconds (JVM running for 30.455)
2022-11-03 10:31:05.111 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 10:31:10.743 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 10:31:11.123 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 10:31:11.133 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 10:31:11.193 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 10:31:11.252 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 0
2022-11-03 10:31:11.259 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: []
2022-11-03 10:31:11.373 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 10:31:11.374 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 10:31:11.378 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 10:31:11.379 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:1,jobRunLimit:5
2022-11-03 10:31:13.478 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: []
2022-11-03 10:31:13.480 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 10:31:13.539 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 10:31:13.551 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 10:31:13.603 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 10:31:13.604 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 10:31:48.031 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 10:31:48.059 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 23040 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 10:31:48.060 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 10:31:51.287 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:31:51.308 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$8f94ff7e] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:31:52.317 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:31:52.369 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:31:52.470 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 10:31:52.472 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 10:31:52.472 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 10:31:52.472 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 10:31:54.137 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:31:54.138 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:31:54.283 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 10:31:54.284 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 10:31:54.284 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 10:31:54.284 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:31:54.284 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:31:56.499 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:31:58.427 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15813ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 10:32:04.458 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:32:05.228 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 10:32:05.265 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 10:32:05.296 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 10:32:06.254 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 10:32:06.255 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 10:32:06.255 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 10:32:06.255 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 10:32:06.255 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 10:32:06.255 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 10:32:06.256 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 10:32:06.327 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 10:32:06.328 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 10:32:09.465 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 10:32:11.702 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 25.474 seconds (JVM running for 29.089)
2022-11-03 10:32:12.686 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 10:32:20.325 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 10:32:20.671 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 10:32:20.681 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 10:32:20.721 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 10:32:20.777 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 10:32:20.784 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5","--code_type":"Java","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:32:20.884 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 10:32:20.886 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 10:32:20.890 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 10:32:20.891 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 10:32:23.369 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5","--code_type":"Java","--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 
10000","--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1","--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar","--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.v
olumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false","--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:32:24.904 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 10:32:25.612 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:32:25.613 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:32:25.617 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:32:25.623 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:32:25.625 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:32:25.617(LocalDateTime), 2(Long)
2022-11-03 10:32:25.640 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:32:27.219 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 10:32:27.221 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T10:32:27.215(LocalDateTime), 2(Long)
2022-11-03 10:32:27.230 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 10:32:27.231 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 10:32:30.174 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-748004,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 10:32:31.227 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker 5, --code_type Java, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 10:32:31.228 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-748004,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-748004,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-748004
2022-11-03 10:32:34.927 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T02:32:06Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-748004"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T02:32:06Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-748004
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10042469
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-748004
        uid: 3dbb42d5-afdd-43cd-a17c-85f29fa0fbc2
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker 5, --code_type Java, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-748004
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-748004
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-748004
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-748004
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 10:32:36.378 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:32:36.379 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:32:36.383 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:32:36.384 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:32:36.385 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:32:36.383(LocalDateTime), 2(Long)
2022-11-03 10:32:36.397 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:32:36.408 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 10:32:36.451 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 10:32:36.683 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 10:32:36.699 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 10:32:36.700 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 10:36:59.048 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 10:36:59.078 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 14624 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 10:36:59.079 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 10:37:02.023 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:37:02.046 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$b9877c84] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:37:03.025 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:37:03.066 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:37:03.154 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 10:37:03.157 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 10:37:03.157 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 10:37:03.157 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 10:37:04.763 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:37:04.763 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:37:04.854 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 10:37:04.855 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 10:37:04.855 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 10:37:04.855 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:37:04.855 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:37:07.013 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:37:08.776 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @14871ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 10:37:13.834 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:37:14.472 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 10:37:14.506 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 10:37:14.539 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 10:37:15.496 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 10:37:15.497 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 10:37:15.497 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 10:37:15.497 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 10:37:15.497 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 10:37:15.497 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 10:37:15.497 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 10:37:15.572 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 10:37:15.572 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 10:37:18.857 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 10:37:21.107 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 23.778 seconds (JVM running for 27.203)
2022-11-03 10:37:21.975 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 10:37:29.268 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 10:37:29.602 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 10:37:29.614 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 10:37:29.657 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 10:37:29.711 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 10:37:29.723 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:37:29.822 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 10:37:29.824 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 10:37:29.830 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 10:37:29.830 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 10:37:29.831 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:37:29.832 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 10:37:29.836 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:37:29.837 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:37:29.840 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:37:29.846 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:37:29.847 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:37:29.841(LocalDateTime), 2(Long)
2022-11-03 10:37:29.857 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:37:29.871 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 10:37:29.872 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T10:37:29.868(LocalDateTime), 2(Long)
2022-11-03 10:37:29.878 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 10:37:29.878 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 10:37:29.900 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-049900,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 10:37:30.181 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker 5, --code_type Java, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 10:37:30.183 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-049900,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-049900,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-049900
2022-11-03 10:37:30.772 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T02:37:03Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-049900"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T02:37:03Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-049900
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10044717
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-049900
        uid: e2cb0856-4d8b-4e75-9d13-746ee1d83125
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker 5, --code_type Java, --code_arguments --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-049900
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-049900
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-049900
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-049900
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 10:37:30.808 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:37:30.808 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:37:30.843 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:37:30.845 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:37:30.846 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:37:30.844(LocalDateTime), 2(Long)
2022-11-03 10:37:30.855 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:37:30.866 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 10:37:30.981 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 10:37:31.311 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 10:37:31.371 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 10:37:31.372 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 10:53:15.640 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 10:53:15.668 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 11292 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 10:53:15.669 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 10:53:18.900 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:53:18.919 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$f4497ba8] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:53:19.874 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:53:19.911 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 10:53:19.999 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 10:53:20.004 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 10:53:20.004 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 10:53:20.004 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 10:53:22.108 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:53:22.108 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 10:53:22.248 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 10:53:22.248 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 10:53:22.248 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 10:53:22.248 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:53:22.248 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 10:53:24.540 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:53:26.354 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15911ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 10:53:31.842 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 10:53:32.535 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 10:53:32.570 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 10:53:32.605 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 10:53:33.557 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 10:53:33.557 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 10:53:33.557 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 10:53:33.558 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 10:53:33.558 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 10:53:33.558 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 10:53:33.558 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 10:53:33.632 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 10:53:33.633 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 10:53:36.701 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 10:53:38.863 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 25.004 seconds (JVM running for 28.421)
2022-11-03 10:53:39.709 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 10:53:39.774 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 10:53:40.175 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 10:53:40.185 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 10:53:40.230 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 10:53:40.289 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 10:53:40.299 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:53:40.439 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 10:53:40.442 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 10:53:40.446 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 10:53:40.447 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 10:53:40.447 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 10:53:40.448 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 10:53:40.453 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:53:40.454 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:53:40.457 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:53:40.465 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:53:40.469 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:53:40.458(LocalDateTime), 2(Long)
2022-11-03 10:53:40.485 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:53:40.497 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 10:53:40.497 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T10:53:40.494(LocalDateTime), 2(Long)
2022-11-03 10:53:40.502 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 10:53:40.503 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 10:53:40.524 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-020524,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 10:53:40.905 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker:5, --code_type:Java, --code_arguments:--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image:registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file:s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf:spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class:com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 10:53:40.906 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-020524,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-020524,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-020524
2022-11-03 10:53:41.479 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T02:53:14Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-020524"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T02:53:14Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-020524
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10052056
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-020524
        uid: 785c37ac-50d0-4735-bfef-26a8ff3b8071
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker:5, --code_type:Java, --code_arguments:--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image:registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file:s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf:spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class:com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-020524
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-020524
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-020524
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-020524
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 10:53:41.510 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 10:53:41.511 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 10:53:41.515 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 10:53:41.516 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 10:53:41.517 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T10:53:41.515(LocalDateTime), 2(Long)
2022-11-03 10:53:41.531 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 10:53:41.542 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 10:53:41.567 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 10:53:41.582 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 10:53:41.614 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 10:53:41.615 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 11:08:19.637 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 11:08:19.667 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 19744 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 11:08:19.668 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 11:08:22.572 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:08:22.596 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$d8a4a31e] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:08:23.538 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:08:23.572 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:08:23.659 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 11:08:23.662 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 11:08:23.662 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 11:08:23.662 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 11:08:25.176 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:08:25.178 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:08:25.269 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 11:08:25.269 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 11:08:25.269 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 11:08:25.269 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:08:25.269 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:08:27.445 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:08:29.137 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15188ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 11:08:34.011 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:08:34.596 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 11:08:34.625 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 11:08:34.654 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 11:08:35.531 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 11:08:35.532 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 11:08:35.532 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 11:08:35.532 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 11:08:35.532 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 11:08:35.533 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 11:08:35.533 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 11:08:35.592 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 11:08:35.592 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 11:08:38.104 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 11:08:39.821 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 22.26 seconds (JVM running for 25.871)
2022-11-03 11:08:40.648 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 11:08:40.701 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 11:08:41.019 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 11:08:41.027 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 11:08:41.063 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 11:08:41.114 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 11:08:41.123 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:08:41.231 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 11:08:41.233 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 11:08:41.240 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 11:08:41.240 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 11:08:41.241 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:08:41.241 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 11:08:41.247 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:08:41.248 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:08:41.254 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:08:41.259 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:08:41.260 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:08:41.255(LocalDateTime), 2(Long)
2022-11-03 11:08:41.271 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:08:41.282 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 11:08:41.283 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T11:08:41.280(LocalDateTime), 2(Long)
2022-11-03 11:08:41.288 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 11:08:41.288 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 11:08:41.312 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-921312,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 11:08:41.609 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker:5, --code_type:Java, --code_arguments:CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image:registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file:s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf:spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class:com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 11:08:41.610 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-921312,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-921312,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-921312
2022-11-03 11:08:42.095 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T03:08:14Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-921312"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T03:08:14Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-921312
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10058980
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-921312
        uid: fc67394d-f7db-472e-9a66-b440dff8e3c8
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker:5, --code_type:Java, --code_arguments:CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image:registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file:s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf:spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class:com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-921312
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-921312
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-921312
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-921312
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 11:08:42.111 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:08:42.111 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:08:42.120 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:08:42.121 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:08:42.121 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:08:42.120(LocalDateTime), 2(Long)
2022-11-03 11:08:42.128 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:08:42.139 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 11:08:42.165 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 11:08:42.172 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 11:08:42.204 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 11:08:42.205 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 11:10:37.882 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 11:10:37.906 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 4036 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 11:10:37.906 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 11:10:40.807 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:10:40.831 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$acee5bc2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:10:41.790 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:10:41.828 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:10:41.914 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 11:10:41.917 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 11:10:41.917 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 11:10:41.917 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 11:10:43.455 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:10:43.455 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:10:43.553 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 11:10:43.553 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 11:10:43.553 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 11:10:43.553 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:10:43.553 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:10:45.754 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:10:47.461 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13518ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 11:10:52.163 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:10:52.763 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 11:10:52.792 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 11:10:52.819 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 11:10:53.838 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 11:10:53.839 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 11:10:53.839 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 11:10:53.839 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 11:10:53.840 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 11:10:53.840 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 11:10:53.840 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 11:10:53.914 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 11:10:53.915 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 11:10:56.781 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 11:10:59.597 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 23.47 seconds (JVM running for 25.655)
2022-11-03 11:11:00.935 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 11:11:00.994 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 11:11:01.290 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 11:11:01.298 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 11:11:01.336 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 11:11:01.393 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 11:11:01.401 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:11:01.480 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 11:11:01.481 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 11:11:01.485 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 11:11:01.486 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 11:11:01.487 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:11:01.487 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 11:11:01.492 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:11:01.493 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:11:01.502 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:11:01.507 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:11:01.509 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:11:01.503(LocalDateTime), 2(Long)
2022-11-03 11:11:01.526 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:11:01.537 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 11:11:01.538 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T11:11:01.535(LocalDateTime), 2(Long)
2022-11-03 11:11:01.547 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 11:11:01.548 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 11:11:01.575 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-061575,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 11:11:01.944 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker 5, --code_type Java, --code_arguments CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 11:11:01.947 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-061575,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-061575,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-061575
2022-11-03 11:11:02.463 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T03:10:35Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-061575"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T03:10:35Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-061575
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10060042
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-061575
        uid: 106c7dff-9328-4d86-a081-11bb0015d962
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker 5, --code_type Java, --code_arguments CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-061575
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-061575
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-061575
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-061575
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 11:11:02.489 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:11:02.491 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:11:02.496 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:11:02.498 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:11:02.500 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:11:02.497(LocalDateTime), 2(Long)
2022-11-03 11:11:02.512 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:11:02.522 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 11:11:02.545 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 11:11:02.621 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 11:11:02.640 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 11:11:02.640 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 11:12:15.910 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 11:12:15.939 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 8344 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 11:12:15.940 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 11:12:18.844 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:12:18.867 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$acee5bc2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:12:19.793 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:12:19.846 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:12:19.925 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 11:12:19.930 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 11:12:19.930 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 11:12:19.930 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 11:12:21.547 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:12:21.547 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:12:21.640 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 11:12:21.641 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 11:12:21.641 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 11:12:21.641 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:12:21.641 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:12:23.819 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:12:25.458 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13327ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 11:12:30.206 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:12:30.811 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 11:12:30.837 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 11:12:30.864 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 11:12:31.705 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 11:12:31.706 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 11:12:31.706 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 11:12:31.706 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 11:12:31.706 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 11:12:31.707 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 11:12:31.707 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 11:12:31.765 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 11:12:31.766 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 11:12:34.360 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 11:12:36.148 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 21.888 seconds (JVM running for 24.017)
2022-11-03 11:12:36.851 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 11:12:36.908 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 11:12:37.232 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 11:12:37.241 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 11:12:37.279 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 11:12:37.331 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 11:12:37.340 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:12:37.429 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 11:12:37.429 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 11:12:37.435 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 11:12:37.436 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 11:12:37.437 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:12:37.438 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 11:12:37.443 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:12:37.444 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:12:37.449 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:12:37.452 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:12:37.453 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:12:37.450(LocalDateTime), 2(Long)
2022-11-03 11:12:37.472 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:12:37.487 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 11:12:37.488 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T11:12:37.483(LocalDateTime), 2(Long)
2022-11-03 11:12:37.492 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 11:12:37.492 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 11:12:37.513 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-157513,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 11:12:37.792 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker "5", --code_type "Java", --code_arguments "CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000", --image "registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1", --code_file "s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar", --sparkConf "spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false", --code_class "com.geniusai.aip.app.SparkSqlOnHive"]
2022-11-03 11:12:37.793 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-157513,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-157513,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-157513
2022-11-03 11:12:38.273 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T03:12:11Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-157513"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T03:12:11Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-157513
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10060784
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-157513
        uid: 5da7cde9-cdfc-4a66-af9d-a364cf8bb196
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker "5", --code_type "Java", --code_arguments "CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000", --image "registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1", --code_file "s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar", --sparkConf "spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false", --code_class "com.geniusai.aip.app.SparkSqlOnHive"]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-157513
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-157513
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-157513
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-157513
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 11:12:38.287 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:12:38.287 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:12:38.291 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:12:38.292 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:12:38.293 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:12:38.291(LocalDateTime), 2(Long)
2022-11-03 11:12:38.305 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:12:38.317 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 11:12:38.344 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 11:12:38.476 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 11:12:38.507 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 11:12:38.508 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 11:20:58.601 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 11:20:58.622 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 24440 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 11:20:58.623 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 11:21:01.556 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:21:01.578 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$acee5bc2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:21:02.549 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:21:02.585 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:21:02.671 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 11:21:02.674 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 11:21:02.674 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 11:21:02.674 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 11:21:04.260 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:21:04.261 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:21:04.354 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 11:21:04.354 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 11:21:04.354 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 11:21:04.354 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:21:04.355 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:21:06.457 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:21:08.225 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13493ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 11:21:13.050 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:21:13.645 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 11:21:13.671 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 11:21:13.699 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 11:21:14.716 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 11:21:14.717 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 11:21:14.717 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 11:21:14.717 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 11:21:14.717 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 11:21:14.717 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 11:21:14.718 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 11:21:14.791 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 11:21:14.791 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 11:21:19.395 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 11:21:21.371 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 24.708 seconds (JVM running for 26.639)
2022-11-03 11:21:22.211 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 11:21:22.271 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 11:21:22.624 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 11:21:22.634 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 11:21:22.689 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 11:21:22.747 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 11:21:22.755 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:21:22.895 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 11:21:22.897 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 11:21:22.901 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 11:21:22.901 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 11:21:22.901 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:21:22.902 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 11:21:22.908 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:21:22.909 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:21:22.913 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:21:22.915 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:21:22.916 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:21:22.913(LocalDateTime), 2(Long)
2022-11-03 11:21:22.927 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:21:22.942 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 11:21:22.942 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T11:21:22.939(LocalDateTime), 2(Long)
2022-11-03 11:21:22.950 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 11:21:22.951 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 11:21:22.970 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-682970,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 11:21:23.260 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker 5]
2022-11-03 11:21:23.262 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-682970,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-682970,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-682970
2022-11-03 11:21:23.774 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T03:20:56Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-682970"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T03:20:56Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-682970
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10064760
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-682970
        uid: d3cb0784-ac50-4b4e-a801-c2d68113fc82
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker 5]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-682970
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-682970
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-682970
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-682970
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 11:21:23.787 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:21:23.788 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:21:23.793 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:21:23.794 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:21:23.795 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:21:23.794(LocalDateTime), 2(Long)
2022-11-03 11:21:23.814 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:21:23.826 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 11:21:23.857 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 11:21:23.942 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 11:21:23.969 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 11:21:23.969 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 11:26:53.501 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 11:26:53.526 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 23432 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 11:26:53.527 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 11:26:56.589 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:26:56.614 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$f0792ca6] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:26:57.565 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:26:57.599 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:26:57.672 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 11:26:57.676 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 11:26:57.676 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 11:26:57.676 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 11:26:59.440 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:26:59.441 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:26:59.537 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 11:26:59.538 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 11:26:59.538 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 11:26:59.538 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:26:59.538 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:27:01.987 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:27:03.773 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @14230ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 11:27:09.345 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:27:10.264 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 11:27:10.293 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 11:27:10.324 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 11:27:11.310 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 11:27:11.311 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 11:27:11.311 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 11:27:11.311 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 11:27:11.311 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 11:27:11.311 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 11:27:11.312 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 11:27:11.378 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 11:27:11.378 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 11:27:14.332 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 11:27:16.317 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 24.586 seconds (JVM running for 26.774)
2022-11-03 11:27:17.202 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 11:27:17.257 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 11:27:17.571 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 11:27:17.580 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 11:27:17.617 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 11:27:17.666 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 11:27:17.674 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:27:17.748 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 11:27:17.749 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 11:27:17.754 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 11:27:17.755 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 11:27:17.755 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:27:17.756 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 11:27:17.761 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:27:17.762 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:27:17.766 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:27:17.768 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:27:17.770 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:27:17.766(LocalDateTime), 2(Long)
2022-11-03 11:27:17.801 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:27:17.814 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 11:27:17.815 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T11:27:17.811(LocalDateTime), 2(Long)
2022-11-03 11:27:17.822 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 11:27:17.822 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 11:27:17.842 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-037842,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,serviceAccountName:spark
2022-11-03 11:27:18.162 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5]
2022-11-03 11:27:18.163 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-037842,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-037842,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-037842
2022-11-03 11:27:18.724 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T03:26:51Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-037842"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T03:26:51Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-037842
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10067440
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-037842
        uid: 6d133068-21b9-4699-8fdc-3c6427c89eb8
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-037842
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-037842
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-037842
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.2
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-037842
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 11:27:18.736 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:27:18.736 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:27:18.739 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:27:18.740 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:27:18.741 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:27:18.740(LocalDateTime), 2(Long)
2022-11-03 11:27:18.754 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:27:18.765 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 11:27:18.786 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 11:27:18.792 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 11:27:18.821 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 11:27:18.822 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 11:31:44.809 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 11:31:44.836 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 19332 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 11:31:44.837 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 11:31:48.028 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:31:48.075 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$acee5bc2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:31:49.042 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:31:49.084 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:31:49.158 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 11:31:49.160 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 11:31:49.161 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 11:31:49.162 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 11:31:50.751 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:31:50.751 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:31:50.844 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 11:31:50.844 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 11:31:50.845 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 11:31:50.845 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:31:50.845 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:31:52.969 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:31:54.654 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13843ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 11:31:59.559 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:32:00.185 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 11:32:00.217 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 11:32:00.251 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 11:32:01.169 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 11:32:01.169 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 11:32:01.169 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 11:32:01.169 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 11:32:01.169 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 11:32:01.170 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 11:32:01.170 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 11:32:01.234 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 11:32:01.235 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 11:32:03.976 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 11:32:05.817 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 22.64 seconds (JVM running for 25.007)
2022-11-03 11:32:06.565 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 11:32:06.642 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 11:32:06.999 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 11:32:07.009 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 11:32:07.053 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 11:32:07.109 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 11:32:07.118 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.3, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:32:07.227 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 11:32:07.229 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 11:32:07.235 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 11:32:07.236 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 11:32:07.237 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.3, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:32:07.237 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 11:32:07.247 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:32:07.247 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:32:07.253 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:32:07.257 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:32:07.258 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:32:07.254(LocalDateTime), 2(Long)
2022-11-03 11:32:07.269 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:32:07.281 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 11:32:07.281 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T11:32:07.278(LocalDateTime), 2(Long)
2022-11-03 11:32:07.297 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 11:32:07.297 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 11:32:07.324 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-327324,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.3,serviceAccountName:spark
2022-11-03 11:32:07.656 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5]
2022-11-03 11:32:07.657 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-327324,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.3,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-327324,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-327324
2022-11-03 11:32:08.222 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T03:31:40Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-327324"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T03:31:40Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-327324
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10069698
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-327324
        uid: a317981b-225d-4a35-97d6-51e7c0987461
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-327324
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.3
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-327324
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-327324
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.3
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-327324
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 11:32:08.237 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:32:08.238 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:32:08.241 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:32:08.242 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:32:08.243 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:32:08.241(LocalDateTime), 2(Long)
2022-11-03 11:32:08.252 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:32:08.263 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 11:32:08.292 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 11:32:08.353 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 11:32:08.378 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 11:32:08.380 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 11:35:11.629 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 11:35:11.652 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 6660 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 11:35:11.652 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 11:35:14.632 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:35:14.654 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$acee5bc2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:35:15.613 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:35:15.645 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 11:35:15.764 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 11:35:15.768 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 11:35:15.768 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 11:35:15.768 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 11:35:17.901 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:35:17.902 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 11:35:18.025 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 11:35:18.026 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 11:35:18.026 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 11:35:18.026 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:35:18.026 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 11:35:20.217 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:35:21.947 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15272ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 11:35:26.793 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 11:35:27.409 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 11:35:27.443 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 11:35:27.473 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 11:35:28.459 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 11:35:28.459 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 11:35:28.460 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 11:35:28.460 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 11:35:28.460 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 11:35:28.460 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 11:35:28.460 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 11:35:28.531 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 11:35:28.532 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 11:35:31.404 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 11:35:33.754 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 23.783 seconds (JVM running for 27.078)
2022-11-03 11:35:34.678 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 11:35:34.734 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 11:35:35.082 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 11:35:35.092 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 11:35:35.132 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 11:35:35.180 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 11:35:35.188 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.4, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:35:35.298 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 11:35:35.299 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 11:35:35.303 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 11:35:35.304 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 11:35:35.304 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.4, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 11:35:35.305 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 11:35:35.310 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:35:35.311 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:35:35.315 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:35:35.318 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:35:35.319 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:35:35.316(LocalDateTime), 2(Long)
2022-11-03 11:35:35.334 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:35:35.348 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 11:35:35.348 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T11:35:35.345(LocalDateTime), 2(Long)
2022-11-03 11:35:35.355 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 11:35:35.356 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 11:35:35.375 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-535375,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.4,serviceAccountName:spark
2022-11-03 11:35:35.737 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 11:35:35.739 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-535375,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.4,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-535375,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-535375
2022-11-03 11:35:36.339 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T03:35:09Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-535375"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T03:35:09Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-535375
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10071516
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-535375
        uid: dc358e36-2a20-436d-9167-9c717adcecb7
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-535375
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-535375
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-535375
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.4
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-535375
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 11:35:36.354 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 11:35:36.355 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 11:35:36.358 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 11:35:36.359 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 11:35:36.360 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T11:35:36.359(LocalDateTime), 2(Long)
2022-11-03 11:35:36.368 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 11:35:36.379 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 11:35:36.407 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 11:35:36.416 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 11:35:36.444 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 11:35:36.446 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 12:06:54.375 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 12:06:54.401 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 10464 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 12:06:54.403 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 12:06:57.281 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:06:57.302 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$acee5bc2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:06:58.192 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:06:58.225 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:06:58.294 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 12:06:58.297 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 12:06:58.297 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 12:06:58.297 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 12:07:00.254 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 12:07:00.255 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 12:07:00.389 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 12:07:00.390 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 12:07:00.390 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 12:07:00.390 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 12:07:00.390 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 12:07:04.171 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 12:07:06.108 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15642ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 12:07:11.556 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 12:07:12.273 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 12:07:12.311 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 12:07:12.351 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 12:07:13.576 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 12:07:13.577 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 12:07:13.577 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 12:07:13.577 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 12:07:13.578 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 12:07:13.578 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 12:07:13.578 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 12:07:13.644 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 12:07:13.645 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 12:07:16.987 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 12:07:20.397 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 27.704 seconds (JVM running for 29.931)
2022-11-03 12:07:21.366 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 12:07:21.420 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 12:07:21.851 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 12:07:21.860 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 12:07:21.896 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 12:07:21.959 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 12:07:21.971 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 12:07:22.068 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 12:07:22.076 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 12:07:22.083 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 12:07:22.085 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 12:07:22.085 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 12:07:22.086 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 12:07:22.093 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 12:07:22.093 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 12:07:22.103 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 12:07:22.106 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 12:07:22.108 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T12:07:22.103(LocalDateTime), 2(Long)
2022-11-03 12:07:22.129 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 12:07:22.165 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 12:07:22.165 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T12:07:22.162(LocalDateTime), 2(Long)
2022-11-03 12:07:22.178 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 12:07:22.179 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 12:07:22.205 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-442204,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5,serviceAccountName:spark
2022-11-03 12:07:22.535 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 12:07:22.537 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-442204,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-442204,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-442204
2022-11-03 12:07:23.126 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T04:06:55Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-442204"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T04:06:55Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-442204
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10086783
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-442204
        uid: 312543ac-c613-4c9b-be8d-859015f8587d
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-442204
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-442204
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-442204
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-442204
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 12:07:23.139 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 12:07:23.140 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 12:07:23.143 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 12:07:23.144 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 12:07:23.145 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T12:07:23.143(LocalDateTime), 2(Long)
2022-11-03 12:07:23.155 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 12:07:23.165 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 12:07:23.191 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 12:07:23.304 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 12:07:23.326 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 12:07:23.327 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 12:26:49.184 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 12:26:49.206 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 15856 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 12:26:49.207 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 12:26:51.851 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:26:51.871 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$7d5c89a1] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:26:52.756 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:26:52.791 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:26:52.875 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 12:26:52.878 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 12:26:52.878 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 12:26:52.878 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 12:26:54.363 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 12:26:54.363 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 12:26:54.455 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 12:26:54.455 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 12:26:54.455 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 12:26:54.455 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 12:26:54.455 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 12:26:56.487 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 12:26:58.101 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13661ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 12:27:02.847 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 12:27:03.471 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 12:27:03.500 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 12:27:03.529 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 12:27:04.388 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 12:27:04.389 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 12:27:04.389 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 12:27:04.389 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 12:27:04.389 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 12:27:04.390 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 12:27:04.390 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 12:27:04.456 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 12:27:04.457 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 12:27:07.301 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 12:27:09.133 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 21.528 seconds (JVM running for 24.695)
2022-11-03 12:27:09.866 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 12:27:09.915 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 12:27:10.285 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 12:27:10.294 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 12:27:10.338 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 12:27:10.402 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 12:27:10.408 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 12:27:10.493 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 12:27:10.495 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 12:27:10.498 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 12:27:10.499 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 12:27:10.499 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 12:27:10.500 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 12:27:10.504 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 12:27:10.504 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 12:27:10.509 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 12:27:10.511 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 12:27:10.512 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T12:27:10.509(LocalDateTime), 2(Long)
2022-11-03 12:27:10.527 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 12:27:10.538 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 12:27:10.538 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T12:27:10.535(LocalDateTime), 2(Long)
2022-11-03 12:27:10.544 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 12:27:10.544 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 12:27:10.567 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-630564,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5,serviceAccountName:spark
2022-11-03 12:27:10.835 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 12:27:10.837 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-630564,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-630564,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-630564
2022-11-03 12:27:11.357 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T04:26:44Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-630564"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T04:26:43Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-630564
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10095773
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-630564
        uid: 67c76cb8-c974-45f7-ad2e-d47fd87b285c
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-630564
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-630564
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-630564
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.5
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-630564
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 12:27:11.374 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 12:27:11.375 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 12:27:11.381 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 12:27:11.382 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 12:27:11.383 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T12:27:11.381(LocalDateTime), 2(Long)
2022-11-03 12:27:11.393 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 12:27:11.403 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 12:27:11.427 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 12:27:11.433 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 12:27:11.461 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 12:27:11.462 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 12:37:46.232 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 12:37:46.252 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 14812 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 12:37:46.254 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 12:37:48.930 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:37:48.950 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$acee5bc2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:37:49.846 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:37:49.880 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:37:49.949 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 12:37:49.951 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 12:37:49.951 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 12:37:49.951 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 12:37:51.393 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 12:37:51.394 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 12:37:51.485 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 12:37:51.486 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 12:37:51.487 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 12:37:51.487 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 12:37:51.487 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 12:37:53.596 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 12:37:55.418 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13625ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 12:38:00.197 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 12:38:00.798 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 12:38:00.831 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 12:38:00.863 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 12:38:01.688 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 12:38:01.688 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 12:38:01.688 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 12:38:01.688 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 12:38:01.689 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 12:38:01.689 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 12:38:01.689 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 12:38:01.756 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 12:38:01.756 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 12:38:04.397 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 12:38:06.228 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 22.042 seconds (JVM running for 24.436)
2022-11-03 12:38:06.936 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 12:38:06.991 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 12:38:07.354 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 12:38:07.364 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 12:38:07.406 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 12:38:07.454 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 12:38:07.464 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.6, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 12:38:07.552 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 12:38:07.553 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 12:38:07.559 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 12:38:07.559 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 12:38:07.560 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.6, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 12:38:07.563 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 12:38:07.567 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 12:38:07.567 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 12:38:07.572 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 12:38:07.575 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 12:38:07.576 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T12:38:07.572(LocalDateTime), 2(Long)
2022-11-03 12:38:07.595 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 12:38:07.606 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 12:38:07.607 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T12:38:07.603(LocalDateTime), 2(Long)
2022-11-03 12:38:07.611 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 12:38:07.612 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 12:38:07.635 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-287635,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.6,serviceAccountName:spark
2022-11-03 12:38:07.905 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 12:38:07.906 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-287635,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.6,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-287635,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-287635
2022-11-03 12:38:08.449 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T04:37:41Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-287635"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T04:37:41Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-287635
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10100737
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-287635
        uid: 67bc333c-573a-4fcf-bd31-e39db3e0da53
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-287635
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-287635
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-287635
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.6
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-287635
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 12:38:08.477 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 12:38:08.478 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 12:38:08.483 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 12:38:08.484 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 12:38:08.485 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T12:38:08.483(LocalDateTime), 2(Long)
2022-11-03 12:38:08.495 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 12:38:08.505 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 12:38:08.529 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 12:38:08.539 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 12:38:08.560 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 12:38:08.561 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 12:52:37.387 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 12:52:37.410 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 23796 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 12:52:37.411 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 12:52:40.072 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:52:40.093 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$d8a4a31e] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:52:40.965 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:52:41.004 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 12:52:41.076 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 12:52:41.078 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 12:52:41.079 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 12:52:41.079 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 12:52:42.676 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 12:52:42.676 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 12:52:42.780 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 12:52:42.780 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 12:52:42.781 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 12:52:42.781 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 12:52:42.781 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 12:52:44.833 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 12:52:46.511 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13414ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 12:52:51.002 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 12:52:51.609 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 12:52:51.635 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 12:52:51.661 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 12:52:52.467 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 12:52:52.467 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 12:52:52.467 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 12:52:52.467 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 12:52:52.467 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 12:52:52.467 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 12:52:52.467 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 12:52:52.524 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 12:52:52.525 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 12:52:55.477 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 12:52:57.305 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 21.638 seconds (JVM running for 24.209)
2022-11-03 12:52:57.984 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 12:52:58.032 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 12:52:58.343 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 12:52:58.352 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 12:52:58.390 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 12:52:58.438 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 12:52:58.446 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 12:52:58.526 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 12:52:58.527 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 12:52:58.531 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 12:52:58.532 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 12:52:58.532 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 12:52:58.533 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 12:52:58.537 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 12:52:58.538 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 12:52:58.541 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 12:52:58.544 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 12:52:58.545 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T12:52:58.542(LocalDateTime), 2(Long)
2022-11-03 12:52:58.557 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 12:52:58.569 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 12:52:58.569 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T12:52:58.566(LocalDateTime), 2(Long)
2022-11-03 12:52:58.579 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 12:52:58.580 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 12:52:58.605 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-178605,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7,serviceAccountName:spark
2022-11-03 12:52:58.873 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 12:52:58.874 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-178605,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-178605,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-178605
2022-11-03 12:52:59.354 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T04:52:31Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-178605"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T04:52:31Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-178605
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10107488
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-178605
        uid: b520d15f-c05a-4c26-b76d-bc2901da986f
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-178605
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-178605
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-178605
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-178605
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-cjs89
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-cjs89
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-cjs89
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 12:52:59.368 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 12:52:59.369 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 12:52:59.372 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 12:52:59.373 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 12:52:59.374 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T12:52:59.373(LocalDateTime), 2(Long)
2022-11-03 12:52:59.394 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 12:52:59.404 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 12:52:59.429 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 12:52:59.438 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 12:52:59.466 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 12:52:59.469 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 14:11:53.748 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 14:11:53.769 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 23744 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 14:11:53.770 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 14:11:56.602 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:11:56.622 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$acee5bc2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:11:57.521 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:11:57.556 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:11:57.633 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 14:11:57.636 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 14:11:57.636 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 14:11:57.636 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 14:11:59.153 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 14:11:59.153 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 14:11:59.245 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 14:11:59.245 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 14:11:59.245 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 14:11:59.245 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 14:11:59.245 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 14:12:01.387 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 14:12:02.982 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13515ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 14:12:07.712 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 14:12:08.295 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 14:12:08.323 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 14:12:08.351 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 14:12:09.204 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 14:12:09.204 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 14:12:09.204 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 14:12:09.204 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 14:12:09.205 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 14:12:09.205 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 14:12:09.205 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 14:12:09.268 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 14:12:09.268 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 14:12:11.910 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 14:12:13.572 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 21.59 seconds (JVM running for 24.105)
2022-11-03 14:12:14.281 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 14:12:14.332 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 14:12:14.637 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 14:12:14.646 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 14:12:14.685 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 14:12:14.756 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 14:12:14.764 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 14:12:14.866 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 14:12:14.869 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 14:12:14.873 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 14:12:14.874 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 14:12:14.874 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 14:12:14.875 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 14:12:14.879 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 14:12:14.880 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 14:12:14.884 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 14:12:14.887 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 14:12:14.889 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:52:43 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 14:12:14 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T14:12:14.885(LocalDateTime), 2(Long)
2022-11-03 14:12:14.899 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 14:12:14.912 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 14:12:14.914 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T14:12:14.909(LocalDateTime), 2(Long)
2022-11-03 14:12:14.920 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 14:12:14.920 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 14:12:14.950 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-admin-dns-log-20220726-10000-934950,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7,serviceAccountName:spark
2022-11-03 14:12:15.442 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 14:12:15.444 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-934950,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-admin-dns-log-20220726-10000-934950,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-admin-dns-log-20220726-10000-934950
2022-11-03 14:12:15.965 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T06:11:48Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-admin-dns-log-20220726-10000-934950"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T06:11:48Z
        }]
        name: job-2-spark-admin-admin-dns-log-20220726-10000-934950
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10143483
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-admin-dns-log-20220726-10000-934950
        uid: c21c44a8-733f-4b15-9c74-a91c2f8b8787
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-934950
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-admin-dns-log-20220726-10000-934950
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-admin-dns-log-20220726-10000-934950
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.7
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-admin-dns-log-20220726-10000-934950
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-tjwkf
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-tjwkf
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-tjwkf
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 14:12:15.984 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 14:12:15.985 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 14:12:15.989 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 14:12:15.990 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 14:12:15.991 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:52:43 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 14:12:14 - main - 开始调度任务[test12]
2022-11-03 14:12:15 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T14:12:15.989(LocalDateTime), 2(Long)
2022-11-03 14:12:16.000 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 14:12:16.012 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 14:12:16.037 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 14:12:16.042 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 14:12:16.137 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 14:12:16.138 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 14:45:43.254 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 14:45:43.276 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 25432 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 14:45:43.277 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 14:45:46.357 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:45:46.380 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$d756f2d2] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:45:47.355 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:45:47.400 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:45:47.512 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 14:45:47.515 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 14:45:47.515 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 14:45:47.516 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 14:45:49.178 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 14:45:49.179 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 14:45:49.284 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 14:45:49.285 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 14:45:49.285 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 14:45:49.285 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 14:45:49.285 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 14:45:51.464 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 14:45:53.243 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13896ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 14:45:58.220 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 14:45:58.849 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 14:45:58.878 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 14:45:58.905 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 14:45:59.769 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 14:45:59.770 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 14:45:59.770 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 14:45:59.770 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 14:45:59.770 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 14:45:59.770 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 14:45:59.770 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 14:45:59.832 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 14:45:59.832 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 14:46:03.895 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 14:46:05.894 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 24.303 seconds (JVM running for 26.548)
2022-11-03 14:46:06.688 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 14:46:06.746 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 14:46:07.093 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 14:46:07.103 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 14:46:07.146 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 14:46:07.217 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 14:46:07.225 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.9, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 14:46:07.339 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 14:46:07.340 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 14:46:07.345 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 14:46:07.346 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 14:46:07.346 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.9, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 14:46:07.347 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 14:46:07.351 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 14:46:07.351 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 14:46:07.355 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 14:46:07.358 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 14:46:07.360 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:52:43 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 14:12:14 - main - 开始调度任务[test12]
2022-11-03 14:12:15 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:11:59 - MainThread - 任务[test12]执行失败error: HTTPConnectionPool(host='sparkjob-2-spark-admin-admin-dns-log-20220726-10000-93-ui-svc', port=4040): Max retries exceeded with url: /api/v1/applications/spark-4c16074c29df4d50a7cfd210063eab73/jobs (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fb22dd93e20>: Failed to establish a new connection: [Errno 111] Connection refused'))2022-11-03 14:46:07 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T14:46:07.356(LocalDateTime), 2(Long)
2022-11-03 14:46:07.371 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 14:46:07.385 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 14:46:07.386 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T14:46:07.382(LocalDateTime), 2(Long)
2022-11-03 14:46:07.393 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 14:46:07.394 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 14:46:07.414 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-967414,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.9,serviceAccountName:spark
2022-11-03 14:46:07.764 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 14:46:07.764 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-967414,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.9,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-967414,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-967414
2022-11-03 14:46:08.401 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T06:45:40Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-967414"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T06:45:40Z
        }]
        name: job-2-spark-admin-967414
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10158876
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-967414
        uid: 52211982-1972-421a-bea4-7d715bc08689
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-967414
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.9
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-967414
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-967414
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.9
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-967414
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-tjwkf
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-tjwkf
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-tjwkf
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 14:46:08.415 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 14:46:08.416 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 14:46:08.419 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 14:46:08.420 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 14:46:08.421 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:52:43 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 14:12:14 - main - 开始调度任务[test12]
2022-11-03 14:12:15 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:11:59 - MainThread - 任务[test12]执行失败error: HTTPConnectionPool(host='sparkjob-2-spark-admin-admin-dns-log-20220726-10000-93-ui-svc', port=4040): Max retries exceeded with url: /api/v1/applications/spark-4c16074c29df4d50a7cfd210063eab73/jobs (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fb22dd93e20>: Failed to establish a new connection: [Errno 111] Connection refused'))2022-11-03 14:46:07 - main - 开始调度任务[test12]
2022-11-03 14:46:08 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T14:46:08.419(LocalDateTime), 2(Long)
2022-11-03 14:46:08.428 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 14:46:08.439 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 14:46:08.470 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 14:46:08.475 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 14:46:08.505 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 14:46:08.508 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 14:56:50.314 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 14:56:50.334 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 316 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 14:56:50.335 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 14:56:53.200 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:56:53.222 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$7d5c89a1] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:56:54.173 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:56:54.212 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 14:56:54.297 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 14:56:54.300 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 14:56:54.300 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 14:56:54.300 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 14:56:55.908 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 14:56:55.908 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 14:56:55.994 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 14:56:55.995 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 14:56:55.995 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 14:56:55.995 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 14:56:55.995 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 14:56:58.199 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 14:56:59.894 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @15256ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 14:57:04.944 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 14:57:05.610 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 14:57:05.639 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 14:57:05.667 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 14:57:06.538 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 14:57:06.538 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 14:57:06.538 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 32G
2022-11-03 14:57:06.538 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 4
2022-11-03 14:57:06.539 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 14:57:06.539 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 14:57:06.539 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 14:57:06.603 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 14:57:06.603 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 14:57:09.356 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 14:57:11.271 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 22.619 seconds (JVM running for 26.633)
2022-11-03 14:57:12.300 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 14:57:12.368 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 14:57:12.727 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 14:57:12.735 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 14:57:12.779 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 14:57:12.845 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 14:57:12.857 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.10, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 14:57:13.006 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 14:57:13.035 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 14:57:13.044 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 14:57:13.045 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 14:57:13.045 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.10, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 14:57:13.045 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 14:57:13.057 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 14:57:13.058 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 14:57:13.061 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 14:57:13.064 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 14:57:13.065 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:52:43 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 14:12:14 - main - 开始调度任务[test12]
2022-11-03 14:12:15 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:11:59 - MainThread - 任务[test12]执行失败error: HTTPConnectionPool(host='sparkjob-2-spark-admin-admin-dns-log-20220726-10000-93-ui-svc', port=4040): Max retries exceeded with url: /api/v1/applications/spark-4c16074c29df4d50a7cfd210063eab73/jobs (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fb22dd93e20>: Failed to establish a new connection: [Errno 111] Connection refused'))2022-11-03 14:46:07 - main - 开始调度任务[test12]
2022-11-03 14:46:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:45:52 - MainThread - 任务[test12]执行失败error: read_namespaced_pod_log() missing 1 required positional argument: 'namespace'2022-11-03 14:57:13 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T14:57:13.061(LocalDateTime), 2(Long)
2022-11-03 14:57:13.080 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 14:57:13.095 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 14:57:13.097 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T14:57:13.092(LocalDateTime), 2(Long)
2022-11-03 14:57:13.104 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 14:57:13.104 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 14:57:13.141 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-633141,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.10,serviceAccountName:spark
2022-11-03 14:57:13.504 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 14:57:13.506 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-633141,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.10,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-633141,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-633141
2022-11-03 14:57:14.060 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T06:56:46Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-633141"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T06:56:46Z
        }]
        name: job-2-spark-admin-633141
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10163963
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-633141
        uid: 5dd3cd30-ead3-44c9-887f-7d244cde2e30
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-633141
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.10
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-633141
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-633141
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.10
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-633141
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-tjwkf
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-tjwkf
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-tjwkf
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 14:57:14.506 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 14:57:14.506 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 14:57:14.509 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 14:57:14.510 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 14:57:14.511 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:52:43 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 14:12:14 - main - 开始调度任务[test12]
2022-11-03 14:12:15 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:11:59 - MainThread - 任务[test12]执行失败error: HTTPConnectionPool(host='sparkjob-2-spark-admin-admin-dns-log-20220726-10000-93-ui-svc', port=4040): Max retries exceeded with url: /api/v1/applications/spark-4c16074c29df4d50a7cfd210063eab73/jobs (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fb22dd93e20>: Failed to establish a new connection: [Errno 111] Connection refused'))2022-11-03 14:46:07 - main - 开始调度任务[test12]
2022-11-03 14:46:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:45:52 - MainThread - 任务[test12]执行失败error: read_namespaced_pod_log() missing 1 required positional argument: 'namespace'2022-11-03 14:57:13 - main - 开始调度任务[test12]
2022-11-03 14:57:14 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T14:57:14.510(LocalDateTime), 2(Long)
2022-11-03 14:57:14.526 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 14:57:14.538 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 14:57:14.563 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 14:57:14.570 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 14:57:14.599 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 14:57:14.604 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
2022-11-03 16:06:35.800 [main] WARN  o.s.b.t.j.DuplicateJsonObjectContextCustomizerFactory$DuplicateJsonObjectContextCustomizer - 

Found multiple occurrences of org.json.JSONObject on the class path:

\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/vaadin/external/google/android-json/0.0.20131108.vaadin1/android-json-0.0.20131108.vaadin1.jar!/org/json/JSONObject.class
\tjar:file:/D:/Programs/Maven/apache-maven-3.8.5-bin/repository/com/tdunning/json/1.8/json-1.8.jar!/org/json/JSONObject.class

You may wish to exclude one of them to ensure predictable runtime behavior

2022-11-03 16:06:35.823 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Starting JobServiceTest on 8-gcscl0011 with PID 25472 (started by User in D:\\Projects\\aip-40\\aip40\\aip-task\\aip-task-manage)
2022-11-03 16:06:35.824 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - The following profiles are active: local
2022-11-03 16:06:38.729 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsar-io.github.majusko.pulsar.properties.PulsarProperties' of type [io.github.majusko.pulsar.properties.PulsarProperties] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 16:06:38.760 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'io.github.majusko.pulsar.PulsarAutoConfiguration' of type [io.github.majusko.pulsar.PulsarAutoConfiguration$$EnhancerBySpringCGLIB$$b8bf2ed6] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 16:06:39.703 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'pulsarClient' of type [org.apache.pulsar.client.impl.PulsarClientImpl] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 16:06:39.737 [main] INFO  o.s.c.s.PostProcessorRegistrationDelegate$BeanPostProcessorChecker - Bean 'urlBuildService' of type [io.github.majusko.pulsar.utils.UrlBuildService] is not eligible for getting processed by all BeanPostProcessors (for example: not eligible for auto-proxying)
2022-11-03 16:06:39.812 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ enabled: ##true## ]
2022-11-03 16:06:39.814 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ days: {datalake_latest/config_info_log_latest=31, datalake_latest/device_info_log_latest=31, datalake_latest/dns_log_latest=31, datalake_latest/endpoint_security_log_latest=31, datalake_latest/file_scan_log_latest=31, datalake_latest/handle_info_log_latest=31, datalake_latest/http_log_latest=31, datalake_latest/network_security_log_latest=31, datalake_latest/qa_alert_log_latest=31, datalake_latest/qa_collect_log_latest=31} ]
2022-11-03 16:06:39.815 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathPrefix: ##s3a://aip/## ]
2022-11-03 16:06:39.815 [main] INFO  c.s.a.t.m.config.DatasetPartitionRetentionConfig - dataset.partition.retention=> [ pathSuffix: ##/tables/data_table/## ]
2022-11-03 16:06:41.426 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 16:06:41.426 [main] WARN  c.baomidou.mybatisplus.core.toolkit.ReflectionKit - Warn: SparkJobTemplateService's superclass not ParameterizedType
2022-11-03 16:06:41.528 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>admin:[*]
2022-11-03 16:06:41.528 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>browser:[dataspace_read, dataspace_browse, dataset_browse]
2022-11-03 16:06:41.528 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>owner:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_create, dataset_delete, dataset_browse, dataset_label, dataset_read, dataspace_authorization, dataset_authorization]
2022-11-03 16:06:41.528 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>reader:[dataspace_read, dataspace_browse, dataset_browse, dataset_label, dataset_read]
2022-11-03 16:06:41.528 [main] INFO  com.geniusai.aip.data.auth.config.RoleActionConfig - data.auth.roleActions=>updater:[dataspace_update, dataset_update, dataspace_delete, dataspace_read, dataspace_browse, dataset_delete, dataset_browse, dataset_label, dataset_read]
2022-11-03 16:06:43.700 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 16:06:45.400 [main] INFO  org.sparkproject.jetty.util.log - Logging initialized @13271ms to org.sparkproject.jetty.util.log.Slf4jLog
2022-11-03 16:06:50.447 [main] INFO  org.apache.hadoop.hive.conf.HiveConf - Found configuration file null
2022-11-03 16:06:51.084 [main] INFO  hive.metastore - Trying to connect to metastore with URI thrift://10.72.1.31:31083
2022-11-03 16:06:51.115 [main] INFO  hive.metastore - Opened a connection to metastore, current connections: 1
2022-11-03 16:06:51.143 [main] INFO  hive.metastore - Connected to metastore.
2022-11-03 16:06:52.028 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config start#####
2022-11-03 16:06:52.028 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutors: 5
2022-11-03 16:06:52.029 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorMemory: 16G
2022-11-03 16:06:52.029 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultExecutorCores: 2
2022-11-03 16:06:52.029 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultImageName: registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1
2022-11-03 16:06:52.029 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeClass: com.geniusai.aip.app.SparkSqlOnHive
2022-11-03 16:06:52.029 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.defaultCodeFile: s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar
2022-11-03 16:06:52.091 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - spark.config: spark.kubernetes.authenticate.driver.serviceAccountName:spark,
spark.eventLog.enabled:true,
spark.eventLog.dir:s3a://aip/spark-events/,
spark.history.fs.logDirectory:s3a://aip/spark-events/,
spark.hadoop.fs.s3a.access.key:minioadmin,
spark.hadoop.fs.s3a.secret.key:minioadmin,
spark.hadoop.fs.s3a.endpoint:http://10.65.194.24:3900,
spark.hadoop.fs.s3a.impl:org.apache.hadoop.fs.s3a.S3AFileSystem,
spark.hadoop.fs.s3a.path.style.access:true,
spark.hadoop.fs.s3a.connection.ssl.enabled:false,
spark.hadoop.fs.s3a.connection.establish.timeout:50000,
spark.hadoop.fs.s3a.connection.timeout:2000000,
spark.hadoop.fs.s3a.threads.max:100,
spark.hadoop.fs.s3a.max.total.tasks:5000,
spark.hadoop.fs.s3a.paging.maximum:2000,
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version:2,
spark.kryoserializer.buffer.max:1024m,
spark.sql.caseSensitive:true,
spark.sql.cbo.enabled:true,
spark.sql.cbo.starSchemaDetection:true,
spark.sql.datetime.java8API.enabled:false,
spark.sql.sources.partitionOverwriteMode:dynamic,
spark.sql.adaptive.enabled:true,
spark.worker.timeout:1000000,
spark.dynamicAllocation.enabled:false,
spark.shuffle.service.enabled:false,
spark.shuffle.push.enabled:false,
spark.speculation:true,
spark.speculation.quantile:0.9,
spark.kubernetes.memoryOverheadFactor:0.5,
spark.kubernetes.trust.certificates:true,
spark.network.timeout:1200s,
spark.shuffle.mapStatus.compression.codec:lz4,
spark.shuffle.io.maxRetries:30,
spark.shuffle.io.retryWait:30s,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName:OnDemand,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass:managed-nfs-storage,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit:100Gi,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path:/data,
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly:false
2022-11-03 16:06:52.092 [main] INFO  c.s.aip.task.manage.job.config.SparkJobConfig - #####init spark default config end#####
2022-11-03 16:06:54.759 [main] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Initializing ExecutorService 'scheduledExecutor'
2022-11-03 16:06:56.814 [main] INFO  com.geniusai.aip.task.manage.service.JobServiceTest - Started JobServiceTest in 22.669 seconds (JVM running for 24.684)
2022-11-03 16:06:57.679 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####start#####, updateStatusFlag: true
2022-11-03 16:06:57.739 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Starting...
2022-11-03 16:06:58.133 [main] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Start completed.
2022-11-03 16:06:58.147 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==>  Preparing: SELECT DISTINCT j.id AS jobId, jer.id AS execId, jel.id AS logId, j.name AS name, j.owner_name AS ownerName, j.write_mode AS writeMode, j.schedule_mode AS scheduleMode, j.schedule_time AS scheduleTime, j.engine_type AS engineType, j.job_type AS jobType, j.code_type AS codeType, j.\`rank\` AS \`rank\`, j.namespace AS namespace, j.label AS label, j.job_template_id AS jobTemplateId, j.working_dir AS workingDir, j.code AS code, j.input_dataset AS inputDataset, j.output_dataset AS outputDataset, j.output_path AS outputPath, j.command AS command, j.overwrite_entrypoint AS overwriteEntrypoint, j.args AS args, j.volume_mount AS volumeMount, j.node_selector AS nodeSelector, j.resource_memory AS resourceMemory, j.resource_cpu AS resourceCpu, j.resource_gpu AS resourceGpu, j.timeout AS timeout, j.retry AS retry, jer.status AS status, jt.image_name AS imageName, jt.accounts AS serviceAccountName, j.create_time AS createTime, j.update_time AS updateTime FROM job j JOIN job_execute_record jer ON j.id = jer.job_id JOIN job_template jt ON j.job_template_id = jt.id JOIN job_execute_log jel ON jer.id = jel.job_execute_id WHERE jer.status = 'queue' and jer.retry_no = 0 ORDER BY j.rank DESC, j.create_time DESC
2022-11-03 16:06:58.196 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - ==> Parameters: 
2022-11-03 16:06:58.354 [main] DEBUG c.s.a.t.m.m.J.selectSortedQueueJobs - <==      Total: 1
2022-11-03 16:06:58.364 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> queueJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.12, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 16:06:58.480 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==>  Preparing: SELECT COUNT( * ) FROM job_execute_record WHERE (status IN (?,?))
2022-11-03 16:06:58.481 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - ==> Parameters: running(String), scheduled(String)
2022-11-03 16:06:58.487 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.selectCount - <==      Total: 1
2022-11-03 16:06:58.488 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>runningJobs:0,jobRunLimit:5
2022-11-03 16:06:58.489 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> submitJobs: [JobVO(jobId=2, execId=2, logId=2, name=test12, ownerName=admin, writeMode=overwrite, scheduleMode=once, scheduleTime=0, engineType=spark, jobType=etl, codeType=sql, rank=1, namespace=user-spark, label=null, jobTemplateId=1, imageName=registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.12, serviceAccountName=spark, workingDir=null, code=select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, inputDataset=datalake_pulsar/dns_log, outputDataset=admin/dns_log_20220726_10000, outputPath=s3a://aip/admin/dns_log_20220726_10000/tables/data_table/, command=null, overwriteEntrypoint=null, args={"--num_worker":"5",
"--code_type":"Java",
"--code_arguments":"--inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000",
"--image":"registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1",
"--code_file":"s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar",
"--sparkConf":"spark.kubernetes.authenticate.driver.serviceAccountName=spark\\nspark.eventLog.enabled=true\\nspark.eventLog.dir=s3a://aip/spark-events/\\nspark.history.fs.logDirectory=s3a://aip/spark-events/\\nspark.hadoop.fs.s3a.access.key=minioadmin\\nspark.hadoop.fs.s3a.secret.key=minioadmin\\nspark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900\\nspark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem\\nspark.hadoop.fs.s3a.path.style.access=true\\nspark.hadoop.fs.s3a.connection.ssl.enabled=false\\nspark.hadoop.fs.s3a.connection.establish.timeout=50000\\nspark.hadoop.fs.s3a.connection.timeout=2000000\\nspark.hadoop.fs.s3a.threads.max=100\\nspark.hadoop.fs.s3a.max.total.tasks=5000\\nspark.hadoop.fs.s3a.paging.maximum=2000\\nspark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2\\nspark.kryoserializer.buffer.max=1024m\\nspark.sql.caseSensitive=true\\nspark.sql.cbo.enabled=true\\nspark.sql.cbo.starSchemaDetection=true\\nspark.sql.datetime.java8API.enabled=false\\nspark.sql.sources.partitionOverwriteMode=dynamic\\nspark.sql.adaptive.enabled=true\\nspark.worker.timeout=1000000\\nspark.dynamicAllocation.enabled=false\\nspark.shuffle.service.enabled=false\\nspark.shuffle.push.enabled=false\\nspark.speculation=true\\nspark.speculation.quantile=0.9\\nspark.kubernetes.memoryOverheadFactor=0.5\\nspark.kubernetes.trust.certificates=true\\nspark.network.timeout=1200s\\nspark.shuffle.mapStatus.compression.codec=lz4\\nspark.shuffle.io.maxRetries=30\\nspark.shuffle.io.retryWait=30s\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data\\nspark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.
mount.readOnly=false",
"--code_class":"com.geniusai.aip.app.SparkSqlOnHive"}, volumeMount=null, nodeSelector=null, resourceMemory=32G, resourceCpu=4, resourceGpu=null, timeout=180, retry=0, status=queue, createTime=2022-11-02T19:07:43, updateTime=2022-11-02T19:07:43)]
2022-11-03 16:06:58.491 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>start submit jobId:2,jobName:test12,status: queue
2022-11-03 16:06:58.495 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,launch_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 16:06:58.496 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 16:06:58.500 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 16:06:58.504 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 16:06:58.505 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:52:43 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 14:12:14 - main - 开始调度任务[test12]
2022-11-03 14:12:15 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:11:59 - MainThread - 任务[test12]执行失败error: HTTPConnectionPool(host='sparkjob-2-spark-admin-admin-dns-log-20220726-10000-93-ui-svc', port=4040): Max retries exceeded with url: /api/v1/applications/spark-4c16074c29df4d50a7cfd210063eab73/jobs (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fb22dd93e20>: Failed to establish a new connection: [Errno 111] Connection refused'))2022-11-03 14:46:07 - main - 开始调度任务[test12]
2022-11-03 14:46:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:45:52 - MainThread - 任务[test12]执行失败error: read_namespaced_pod_log() missing 1 required positional argument: 'namespace'2022-11-03 14:57:13 - main - 开始调度任务[test12]
2022-11-03 14:57:14 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:56:58 - MainThread - 任务[test12]执行失败error: (1054, "Unknown column 'pod_name' in 'field list'")2022-11-03 16:06:58 - main - 开始调度任务[test12]
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T16:06:58.501(LocalDateTime), 2(Long)
2022-11-03 16:06:58.529 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 16:06:58.542 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==>  Preparing: UPDATE job_execute_record SET status = ?, update_time = ? WHERE id = ?
2022-11-03 16:06:58.543 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - ==> Parameters: scheduled(String), 2022-11-03T16:06:58.538(LocalDateTime), 2(Long)
2022-11-03 16:06:58.548 [main] DEBUG c.s.a.t.m.m.JobExecuteRecordMapper.updateJobStatus - <==    Updates: 1
2022-11-03 16:06:58.549 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>update status finish jobId:2,execId:2,status:scheduled
2022-11-03 16:06:58.570 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>namespace:user-spark,podName:job-2-spark-admin-818569,imageName:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.12,serviceAccountName:spark
2022-11-03 16:06:59.066 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>args:[--num_worker, 5, --code_type, Java, --code_arguments, --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
spark.eventLog.enabled=true
spark.eventLog.dir=s3a://aip/spark-events/
spark.history.fs.logDirectory=s3a://aip/spark-events/
spark.hadoop.fs.s3a.access.key=minioadmin
spark.hadoop.fs.s3a.secret.key=minioadmin
spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
spark.hadoop.fs.s3a.path.style.access=true
spark.hadoop.fs.s3a.connection.ssl.enabled=false
spark.hadoop.fs.s3a.connection.establish.timeout=50000
spark.hadoop.fs.s3a.connection.timeout=2000000
spark.hadoop.fs.s3a.threads.max=100
spark.hadoop.fs.s3a.max.total.tasks=5000
spark.hadoop.fs.s3a.paging.maximum=2000
spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
spark.kryoserializer.buffer.max=1024m
spark.sql.caseSensitive=true
spark.sql.cbo.enabled=true
spark.sql.cbo.starSchemaDetection=true
spark.sql.datetime.java8API.enabled=false
spark.sql.sources.partitionOverwriteMode=dynamic
spark.sql.adaptive.enabled=true
spark.worker.timeout=1000000
spark.dynamicAllocation.enabled=false
spark.shuffle.service.enabled=false
spark.shuffle.push.enabled=false
spark.speculation=true
spark.speculation.quantile=0.9
spark.kubernetes.memoryOverheadFactor=0.5
spark.kubernetes.trust.certificates=true
spark.network.timeout=1200s
spark.shuffle.mapStatus.compression.codec=lz4
spark.shuffle.io.maxRetries=30
spark.shuffle.io.retryWait=30s
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
2022-11-03 16:06:59.067 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>env:KFJ_TASK_RESOURCE_CPU:4,KFJ_RUNNER:admin,KFJ_TASK_RESOURCE_GPU:null,KFJ_PIPELINE_ID:2,KFJ_NAMESPACE:user-spark,KFJ_TASK_NAME:job-2-spark-admin-818569,KFJ_DB_PORT:31006,KFJ_DB_HOST:10.65.220.6,KFJ_JOB_NAME:test12,KFJ_TASK_IMAGES:registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.12,KFJ_JOB_ID:2,KFJ_JOB_LOG_ID:2,KFJ_DB_DATABASE:aip,KFJ_PIPELINE_NAME:job-2-spark-admin-818569,KFJ_DB_PASSWORD:sangfor,KFJ_TASK_RESOURCE_MEMORY:32G,KFJ_JOB_EXECUTE_ID:2,KFJ_CREATOR:admin,KFJ_DB_USERNAME:root,KFJ_TASK_ID:2,KFJ_CODE_TYPE:sql,KFJ_RUN_ID:job-2-spark-admin-818569
2022-11-03 16:06:59.616 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=>submit job finish jobId:2,execId:2,pod:class V1Pod {
    apiVersion: v1
    kind: Pod
    metadata: class V1ObjectMeta {
        annotations: null
        clusterName: null
        creationTimestamp: 2022-11-03T08:06:32Z
        deletionGracePeriodSeconds: null
        deletionTimestamp: null
        finalizers: null
        generateName: null
        generation: null
        labels: null
        managedFields: [class V1ManagedFieldsEntry {
            apiVersion: v1
            fieldsType: FieldsV1
            fieldsV1: {f:spec={f:containers={k:{"name":"job-2-spark-admin-818569"}={.={}, f:args={}, f:env={.={}, k:{"name":"KFJ_CODE_TYPE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_CREATOR"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_DATABASE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_HOST"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PASSWORD"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_PORT"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_DB_USERNAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_EXECUTE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_LOG_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_JOB_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_NAMESPACE"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_PIPELINE_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUNNER"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_RUN_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_ID"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_IMAGES"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_NAME"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_CPU"}={.={}, f:name={}, f:value={}}, k:{"name":"KFJ_TASK_RESOURCE_GPU"}={.={}, f:name={}}, k:{"name":"KFJ_TASK_RESOURCE_MEMORY"}={.={}, f:name={}, f:value={}}}, f:image={}, f:imagePullPolicy={}, f:name={}, f:ports={.={}, k:{"containerPort":8080,"protocol":"TCP"}={.={}, f:containerPort={}, f:protocol={}}}, f:resources={.={}, f:limits={.={}, f:cpu={}, f:memory={}}, f:requests={.={}, f:cpu={}, f:memory={}}}, f:terminationMessagePath={}, f:terminationMessagePolicy={}}}, f:dnsPolicy={}, f:enableServiceLinks={}, f:restartPolicy={}, f:schedulerName={}, f:securityContext={}, f:serviceAccount={}, f:serviceAccountName={}, f:terminationGracePeriodSeconds={}}}
            manager: Kubernetes Java Client
            operation: Update
            time: 2022-11-03T08:06:32Z
        }]
        name: job-2-spark-admin-818569
        namespace: user-spark
        ownerReferences: null
        resourceVersion: 10195992
        selfLink: /api/v1/namespaces/user-spark/pods/job-2-spark-admin-818569
        uid: 8bc67f03-f7b5-42f6-85ee-20dfcd3e4cc3
    }
    spec: class V1PodSpec {
        activeDeadlineSeconds: null
        affinity: null
        automountServiceAccountToken: null
        containers: [class V1Container {
            args: [--num_worker, 5, --code_type, Java, --code_arguments, --inputSql=CREATE TABLE admin.dns_log_20220726_10000 ROW FORMAT SERDE  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'  STORED AS INPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'   OUTPUTFORMAT  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'  LOCATION 's3a://aip/admin/dns_log_20220726_10000/tables/data_table/' AS select *  from datalake_pulsar.dns_log where dt = '2022-07-26' order by v desc , tenant desc limit 10000, --image, registry.ai.geniusai.com/ai-sangfor/spark-k8s:v2022.7.4.1, --code_file, s3a://aip/app/jars/spark-hive-1.0-SNAPSHOT.jar, --sparkConf, spark.kubernetes.authenticate.driver.serviceAccountName=spark
            spark.eventLog.enabled=true
            spark.eventLog.dir=s3a://aip/spark-events/
            spark.history.fs.logDirectory=s3a://aip/spark-events/
            spark.hadoop.fs.s3a.access.key=minioadmin
            spark.hadoop.fs.s3a.secret.key=minioadmin
            spark.hadoop.fs.s3a.endpoint=http://10.65.194.24:3900
            spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem
            spark.hadoop.fs.s3a.path.style.access=true
            spark.hadoop.fs.s3a.connection.ssl.enabled=false
            spark.hadoop.fs.s3a.connection.establish.timeout=50000
            spark.hadoop.fs.s3a.connection.timeout=2000000
            spark.hadoop.fs.s3a.threads.max=100
            spark.hadoop.fs.s3a.max.total.tasks=5000
            spark.hadoop.fs.s3a.paging.maximum=2000
            spark.hadoop.mapreduce.fileoutputcommitter.algorithm.version=2
            spark.kryoserializer.buffer.max=1024m
            spark.sql.caseSensitive=true
            spark.sql.cbo.enabled=true
            spark.sql.cbo.starSchemaDetection=true
            spark.sql.datetime.java8API.enabled=false
            spark.sql.sources.partitionOverwriteMode=dynamic
            spark.sql.adaptive.enabled=true
            spark.worker.timeout=1000000
            spark.dynamicAllocation.enabled=false
            spark.shuffle.service.enabled=false
            spark.shuffle.push.enabled=false
            spark.speculation=true
            spark.speculation.quantile=0.9
            spark.kubernetes.memoryOverheadFactor=0.5
            spark.kubernetes.trust.certificates=true
            spark.network.timeout=1200s
            spark.shuffle.mapStatus.compression.codec=lz4
            spark.shuffle.io.maxRetries=30
            spark.shuffle.io.retryWait=30s
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.claimName=OnDemand
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.storageClass=managed-nfs-storage
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.options.sizeLimit=100Gi
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.path=/data
            spark.kubernetes.executor.volumes.persistentVolumeClaim.spark-local-dir-1.mount.readOnly=false, --code_class, com.geniusai.aip.app.SparkSqlOnHive]
            command: null
            env: [class V1EnvVar {
                name: KFJ_TASK_RESOURCE_CPU
                value: 4
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUNNER
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_GPU
                value: null
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_NAMESPACE
                value: user-spark
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_NAME
                value: job-2-spark-admin-818569
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PORT
                value: 31006
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_HOST
                value: 10.65.220.6
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_NAME
                value: test12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_IMAGES
                value: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.12
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_LOG_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_DATABASE
                value: aip
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_PIPELINE_NAME
                value: job-2-spark-admin-818569
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_PASSWORD
                value: sangfor
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_RESOURCE_MEMORY
                value: 32G
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_JOB_EXECUTE_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CREATOR
                value: admin
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_DB_USERNAME
                value: root
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_TASK_ID
                value: 2
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_CODE_TYPE
                value: sql
                valueFrom: null
            }, class V1EnvVar {
                name: KFJ_RUN_ID
                value: job-2-spark-admin-818569
                valueFrom: null
            }]
            envFrom: null
            image: registry.ai.geniusai.com/ai-sangfor/spark_launcher:20221103.12
            imagePullPolicy: IfNotPresent
            lifecycle: null
            livenessProbe: null
            name: job-2-spark-admin-818569
            ports: [class V1ContainerPort {
                containerPort: 8080
                hostIP: null
                hostPort: null
                name: null
                protocol: TCP
            }]
            readinessProbe: null
            resources: class V1ResourceRequirements {
                limits: {cpu=Quantity{number=0.200, format=DECIMAL_SI}, memory=Quantity{number=1073741824, format=BINARY_SI}}
                requests: {cpu=Quantity{number=0.100, format=DECIMAL_SI}, memory=Quantity{number=536870912, format=BINARY_SI}}
            }
            securityContext: null
            startupProbe: null
            stdin: null
            stdinOnce: null
            terminationMessagePath: /dev/termination-log
            terminationMessagePolicy: File
            tty: null
            volumeDevices: null
            volumeMounts: [class V1VolumeMount {
                mountPath: /var/run/secrets/kubernetes.io/serviceaccount
                mountPropagation: null
                name: spark-token-tjwkf
                readOnly: true
                subPath: null
                subPathExpr: null
            }]
            workingDir: null
        }]
        dnsConfig: null
        dnsPolicy: ClusterFirst
        enableServiceLinks: true
        ephemeralContainers: null
        hostAliases: null
        hostIPC: null
        hostNetwork: null
        hostPID: null
        hostname: null
        imagePullSecrets: null
        initContainers: null
        nodeName: null
        nodeSelector: null
        overhead: null
        preemptionPolicy: PreemptLowerPriority
        priority: 0
        priorityClassName: null
        readinessGates: null
        restartPolicy: Never
        runtimeClassName: null
        schedulerName: default-scheduler
        securityContext: class V1PodSecurityContext {
            fsGroup: null
            fsGroupChangePolicy: null
            runAsGroup: null
            runAsNonRoot: null
            runAsUser: null
            seLinuxOptions: null
            seccompProfile: null
            supplementalGroups: null
            sysctls: null
            windowsOptions: null
        }
        serviceAccount: spark
        serviceAccountName: spark
        setHostnameAsFQDN: null
        shareProcessNamespace: null
        subdomain: null
        terminationGracePeriodSeconds: 30
        tolerations: [class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/not-ready
            operator: Exists
            tolerationSeconds: 300
            value: null
        }, class V1Toleration {
            effect: NoExecute
            key: node.kubernetes.io/unreachable
            operator: Exists
            tolerationSeconds: 300
            value: null
        }]
        topologySpreadConstraints: null
        volumes: [class V1Volume {
            awsElasticBlockStore: null
            azureDisk: null
            azureFile: null
            cephfs: null
            cinder: null
            configMap: null
            csi: null
            downwardAPI: null
            emptyDir: null
            ephemeral: null
            fc: null
            flexVolume: null
            flocker: null
            gcePersistentDisk: null
            gitRepo: null
            glusterfs: null
            hostPath: null
            iscsi: null
            name: spark-token-tjwkf
            nfs: null
            persistentVolumeClaim: null
            photonPersistentDisk: null
            portworxVolume: null
            projected: null
            quobyte: null
            rbd: null
            scaleIO: null
            secret: class V1SecretVolumeSource {
                defaultMode: 420
                items: null
                optional: null
                secretName: spark-token-tjwkf
            }
            storageos: null
            vsphereVolume: null
        }]
    }
    status: class V1PodStatus {
        conditions: null
        containerStatuses: null
        ephemeralContainerStatuses: null
        hostIP: null
        initContainerStatuses: null
        message: null
        nominatedNodeName: null
        phase: Pending
        podIP: null
        podIPs: null
        qosClass: Burstable
        reason: null
        startTime: null
    }
}
2022-11-03 16:06:59.644 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==>  Preparing: SELECT id,job_id,job_execute_id,job_name,submit_logs,launch_logs,run_logs,create_time,update_time FROM job_execute_log WHERE id=?
2022-11-03 16:06:59.644 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - ==> Parameters: 2(Long)
2022-11-03 16:06:59.647 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.selectById - <==      Total: 1
2022-11-03 16:06:59.648 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==>  Preparing: UPDATE job_execute_log SET job_id=?, job_execute_id=?, job_name=?, submit_logs=?, create_time=?, update_time=? WHERE id=?
2022-11-03 16:06:59.650 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - ==> Parameters: 2(Long), 2(Long), test12(String), 2022-11-02 19:07:43 - http-nio-8080-exec-1 - 保存任务[test12]成功,等待调度
2022-11-02 19:17:59 - main - 开始调度任务[test12]
2022-11-02 19:20:30 - main - 开始调度任务[test12]
2022-11-02 19:28:27 - main - 开始调度任务[test12]
2022-11-02 19:30:15 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:33:05 - main - 开始调度任务[test12]
2022-11-03 09:34:29 - main - 任务[test12]调度成功,等待运行
2022-11-03 09:41:15 - main - 开始调度任务[test12]
2022-11-03 09:42:26 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:11:13 - main - 开始调度任务[test12]
2022-11-03 10:12:21 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:22:47 - main - 开始调度任务[test12]
2022-11-03 10:23:01 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:32:25 - main - 开始调度任务[test12]
2022-11-03 10:32:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:37:29 - main - 开始调度任务[test12]
2022-11-03 10:37:30 - main - 任务[test12]调度成功,等待运行
2022-11-03 10:53:40 - main - 开始调度任务[test12]
2022-11-03 10:53:41 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:08:41 - main - 开始调度任务[test12]
2022-11-03 11:08:42 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:11:01 - main - 开始调度任务[test12]
2022-11-03 11:11:02 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:12:37 - main - 开始调度任务[test12]
2022-11-03 11:12:38 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:21:22 - main - 开始调度任务[test12]
2022-11-03 11:21:23 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:27:17 - main - 开始调度任务[test12]
2022-11-03 11:27:18 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:32:07 - main - 开始调度任务[test12]
2022-11-03 11:32:08 - main - 任务[test12]调度成功,等待运行
2022-11-03 11:35:35 - main - 开始调度任务[test12]
2022-11-03 11:35:36 - main - 任务[test12]调度成功,等待运行
2022-11-03 12:07:22 - main - 开始调度任务[test12]
2022-11-03 12:07:23 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:07:22 - MainThread - 任务[test12]执行失败2022-11-03 12:27:10 - main - 开始调度任务[test12]
2022-11-03 12:27:11 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:26:45 - MainThread - 任务[test12]执行失败2022-11-03 12:38:07 - main - 开始调度任务[test12]
2022-11-03 12:38:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:37:52 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 12:52:58 - main - 开始调度任务[test12]
2022-11-03 12:52:59 - main - 任务[test12]调度成功,等待运行

2022-11-03 12:52:43 - MainThread - 任务[test12]执行失败error: Expecting value: line 1 column 1 (char 0)2022-11-03 14:12:14 - main - 开始调度任务[test12]
2022-11-03 14:12:15 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:11:59 - MainThread - 任务[test12]执行失败error: HTTPConnectionPool(host='sparkjob-2-spark-admin-admin-dns-log-20220726-10000-93-ui-svc', port=4040): Max retries exceeded with url: /api/v1/applications/spark-4c16074c29df4d50a7cfd210063eab73/jobs (Caused by NewConnectionError('<urllib3.connection.HTTPConnection object at 0x7fb22dd93e20>: Failed to establish a new connection: [Errno 111] Connection refused'))2022-11-03 14:46:07 - main - 开始调度任务[test12]
2022-11-03 14:46:08 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:45:52 - MainThread - 任务[test12]执行失败error: read_namespaced_pod_log() missing 1 required positional argument: 'namespace'2022-11-03 14:57:13 - main - 开始调度任务[test12]
2022-11-03 14:57:14 - main - 任务[test12]调度成功,等待运行

2022-11-03 14:56:58 - MainThread - 任务[test12]执行失败error: (1054, "Unknown column 'pod_name' in 'field list'")2022-11-03 16:06:58 - main - 开始调度任务[test12]
2022-11-03 16:06:59 - main - 任务[test12]调度成功,等待运行
(String), 2022-11-02T19:07:43(LocalDateTime), 2022-11-03T16:06:59.648(LocalDateTime), 2(Long)
2022-11-03 16:06:59.662 [main] DEBUG c.s.a.t.m.mapper.JobExecuteLogMapper.updateById - <==    Updates: 1
2022-11-03 16:06:59.674 [main] INFO  c.s.aip.task.manage.schedule.task.JobScheduleTask - scheduled.job.schedule=> #####end#####
2022-11-03 16:06:59.711 [SpringContextShutdownHook] INFO  o.s.scheduling.concurrent.ThreadPoolTaskScheduler - Shutting down ExecutorService 'scheduledExecutor'
2022-11-03 16:06:59.819 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown initiated...
2022-11-03 16:06:59.843 [SpringContextShutdownHook] INFO  com.zaxxer.hikari.HikariDataSource - HikariPool-1 - Shutdown completed.
2022-11-03 16:06:59.844 [SpringContextShutdownHook] INFO  org.apache.pulsar.client.impl.PulsarClientImpl - Client closing. URL: pulsar://10.72.1.31:31250
`
// Mock response carrying a very large log payload (the `data` string above)
/**
 * Tells the mock framework whether this mock is enabled.
 * Flip the returned value to `true` to activate the large-log mock response.
 *
 * @returns {boolean} `false` — the mock is currently disabled.
 */
exports.check = () => false;
exports.mockData = function () {
    return {
        "code": "0000000",
        "msg": "suceess",
        "success": true,
        "data": {
            "log":data,
            loopFlag:1,
            retryCnt:3,
            retryNo:0,
        }
    }
};
