package com.pw.study.flink.demo

import org.apache.flink.streaming.api.scala._

/**
 * Flink streaming demo: reads user-behavior records from a CSV file and
 * counts the "pv" (page view) events.
 *
 * @author: future
 * @since: 2022/5/15-14:16
 */
object UserBehaviorDemo {

  /**
   * Entry point: builds a bounded streaming job that parses the
   * UserBehavior CSV, keeps only "pv" (page view) records, and
   * prints a running count keyed by the literal "pv" tag.
   */
  def main(args: Array[String]): Unit = {
    val env: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment
    // Single parallel subtask so printed counts arrive in one ordered stream.
    env.setParallelism(1)

    val rawLines: DataStream[String] = env.readTextFile("data/file/UserBehavior.csv")

    // Parse each CSV row, keep page views, and maintain a running count.
    val pvCounts = rawLines
      .map { row =>
        val fields = row.split(",")
        UserBehavior(fields(0), fields(1), fields(2), fields(3), fields(4))
      }
      .filter(_.behavior == "pv")
      .map(_ => ("pv", 1L)) // collapse every page view onto one key
      .keyBy(_._1)
      .sum(1)

    pvCounts.print()

    env.execute("userBehavior")
  }
}

/**
 * One row of the UserBehavior CSV, all fields kept as raw strings.
 *
 * @param id         user identifier (CSV column 0)
 * @param itemId     item identifier (CSV column 1)
 * @param categoryId item category identifier (CSV column 2)
 * @param behavior   behavior tag, e.g. "pv" for page view (CSV column 3)
 * @param ts         event timestamp as the raw CSV string (CSV column 4)
 */
final case class UserBehavior(id: String, itemId: String, categoryId: String, behavior: String, ts: String)
