package com.example.flinkcourse.lesson4.example;

import com.example.flinkcourse.lesson4.model.Event;
import com.example.flinkcourse.lesson4.source.SourceFactory;
import org.apache.flink.api.common.eventtime.WatermarkStrategy;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.types.Row;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.Arrays;

/**
 * 数据源使用示例
 * 展示各种数据源的使用方式
 */
public class SourceExample {
    private static final Logger LOG = LoggerFactory.getLogger(SourceExample.class);

    /**
     * Wires seven different source types (custom, Kafka, file, JDBC, socket,
     * collection, parallel) into a single streaming job, printing each stream
     * with a distinguishing prefix, then submits the job.
     *
     * <p>All sources are attached to one job graph; the job only runs to
     * completion if every external endpoint (Kafka broker, input path, MySQL,
     * socket) is reachable.
     *
     * @param args unused command-line arguments
     * @throws Exception if job construction or execution fails
     */
    public static void main(String[] args) throws Exception {
        // Create the streaming execution environment.
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // 1. Custom source (legacy SourceFunction-style API via addSource).
        LOG.info("Example 1: Using custom source");
        DataStream<Event> customStream = env.addSource(SourceFactory.createCustomSource());
        customStream.print("Custom Source");

        // 2. Kafka source (new unified Source API via fromSource).
        // No watermarks: these examples use processing time only.
        LOG.info("Example 2: Using Kafka source");
        DataStream<String> kafkaStream = env.fromSource(
            SourceFactory.createKafkaSource(
                "localhost:9092",
                "input-topic",
                "flink-group"
            ),
            WatermarkStrategy.noWatermarks(),
            "Kafka Source"
        );
        kafkaStream.print("Kafka Source");

        // 3. File source; the second argument is the monitoring interval
        //    passed through to the factory (presumably milliseconds — confirm
        //    against SourceFactory.createFileSource).
        LOG.info("Example 3: Using file source");
        DataStream<String> fileStream = env.fromSource(
            SourceFactory.createFileSource("/path/to/input", 1000),
            WatermarkStrategy.noWatermarks(),
            "File Source"
        );
        fileStream.print("File Source");

        // 4. JDBC source reading rows from MySQL with a fixed query.
        LOG.info("Example 4: Using JDBC source");
        DataStream<Row> jdbcStream = env.fromSource(
            SourceFactory.createJdbcSource(
                "jdbc:mysql://localhost:3306/flink_db",
                "SELECT * FROM events"
            ),
            WatermarkStrategy.noWatermarks(),
            "JDBC Source"
        );
        jdbcStream.print("JDBC Source");

        // 5. Socket source reading text lines from localhost:9999.
        LOG.info("Example 5: Using socket source");
        DataStream<String> socketStream = env.addSource(
            SourceFactory.createSocketSource("localhost", 9999)
        );
        socketStream.print("Socket Source");

        // 6. Collection source backed by an in-memory list of two test events.
        LOG.info("Example 6: Using collection source");
        DataStream<Event> collectionStream = env.addSource(
            SourceFactory.createCollectionSource(
                Arrays.asList(
                    Event.builder().id("1").type("test").timestamp(System.currentTimeMillis()).amount(100.0).build(),
                    Event.builder().id("2").type("test").timestamp(System.currentTimeMillis()).amount(200.0).build()
                )
            )
        );
        collectionStream.print("Collection Source");

        // 7. Parallel source; the argument (4) is forwarded to the factory,
        //    presumably the source parallelism — confirm against
        //    SourceFactory.createParallelSource.
        LOG.info("Example 7: Using parallel source");
        DataStream<Event> parallelStream = env.addSource(
            SourceFactory.createParallelSource(4)
        );
        parallelStream.print("Parallel Source");

        // Submit the assembled job graph for execution.
        env.execute("Source Examples");
    }
}