-- Kafka source table for AXB virtual-number binding events
-- (topic hdp_lbg_zhaopin_numberaxb_axInfo). All payload fields arrive as
-- delimiter-separated strings ('text_split' format with '\0001' field
-- delimiter), so every column is declared as string.
CREATE TABLE hiveReport (
    logtitle string,
    type string,
    id string,
    bindid string,
    operatortag string,
    infoid string,
    userid string,
    cookieid string,
    virtualnumber string,
    calleephone string,
    state string,
    bizid string,                -- business line id; used as the aggregation key downstream
    displaytype string,
    bindstarttime string,
    bindexpiredtime string,
    acutalexpiretime string,     -- NOTE(review): likely a typo of "actualexpiretime" in the upstream schema; kept as-is to match the wire format
    addtime string,              -- event creation time; assumes epoch milliseconds — see ts below (TODO confirm with producer)
    updatetime string,
    localid string,
    cateid string,
    sid string,
    ext string,
    usertype string,
    infotype string,
    calleduserid string,
    tjfrom string,               -- traffic/source tag; '-' or '' means "not populated"
    dt string,
    -- Computed event-time column: addtime / 1000 converts millis -> seconds
    -- for FROM_UNIXTIME, then the formatted string is parsed to TIMESTAMP.
    ts AS TO_TIMESTAMP(FROM_UNIXTIME(cast(addtime as bigint)/1000,'yyyy-MM-dd HH:mm:ss')),
-- Event-time watermark: tolerate up to 30 seconds of out-of-order events.
WATERMARK FOR ts AS ts - INTERVAL '30' SECOND
) with (
  'connector.type' = 'kafka',
  'connector.version' = 'universal',
  'connector.topic' = 'hdp_lbg_zhaopin_numberaxb_axInfo',
  'connector.properties.bootstrap.servers' = '10.135.9.4:9092,10.135.9.5:9092,10.135.9.6:9092,10.135.9.7:9092,10.135.9.8:9092',
  'connector.properties.zookeeper.connect' = '10.135.9.4:2181,10.135.9.5:2181,10.135.9.6:2181/58_kafka_cluster',
  -- 'latest-offset': job reads only new records on (re)start; historical data is skipped.
  'connector.properties.group.id' = 'flink-sql-hdp_lbg_zhaopin_numberaxb_axInfo', 'connector.startup-mode' = 'latest-offset',
  'connector.properties.client.id' = 'hdp_lbg_zhaopin-hdp_lbg_zhaopin_numberaxb_axInfo-uZLmM',
  'connector.properties.field.delimiter' = '\0001',
  'connector.properties.line.delimiter' = '\n', 'format.type' = 'text_split');
-- WTable (KV store) sink: one row per (window, bizid) holding the
-- tjfrom-populated percentage, keyed by a composite rowkey string.
CREATE TABLE hR_output (
	rowkey string,               -- 'zp_tjfrom_monitor|2|<window start>|<bizid>'
	val string                   -- percentage rendered as a string, 2 decimal places
) with (
  'connector.type' = 'wtable',
  'connector.wtable.namecenter' = 'nameprod.wtable.58dns.org',
  'connector.wtable.bid' = '135266315',
  'connector.wtable.tableid' = '2',
  -- SECURITY(review): plaintext credential committed in SQL; should be moved
  -- to a secret/credential mechanism rather than stored in source control.
  'connector.wtable.password' = 'QzpXbzY2wsRU7mAF',
  -- Writes are batched: flushed every 100 rows or every 30 seconds, whichever first.
  'connector.write.buffer-flush.max-rows' = '100',
  'connector.write.buffer-flush.interval' = '30s',
  -- 691200 seconds = 8 days; keys expire automatically after that.
  'connector.key.ttl' = '691200'

);
-- Per 30-minute tumbling event-time window and per bizid, compute the
-- percentage of records whose tjfrom tag is populated (neither '-' nor ''),
-- and write it to WTable under key 'zp_tjfrom_monitor|2|<window start>|<bizid>'.
INSERT INTO hR_output
SELECT
	concat('zp_tjfrom_monitor|2|',
	       date_format(TUMBLE_START(ts, INTERVAL '30' MINUTE), 'yyyy-MM-dd HH:mm:ss'),
	       '|', bizid) AS rowkey,
	-- ELSE 0 fixes a NULL-propagation bug: without it, a window in which no
	-- row has a populated tjfrom makes SUM aggregate only NULLs and return
	-- NULL, so val was written as NULL instead of the correct 0 percent.
	-- (A NULL tjfrom also falls through to 0 here, same as before, since the
	-- <> comparisons evaluate to UNKNOWN.)
	cast(round((sum(case when tjfrom <> '-' and tjfrom <> '' then 1 else 0 end)
	            / cast(count(1) as DOUBLE)) * 100, 2) as string) AS val
FROM hiveReport
GROUP BY
	TUMBLE(ts, INTERVAL '30' MINUTE),
	bizid;