package com.light.dp.spark.service.impl;

import com.light.dp.spark.service.SparkService;
//import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;
import org.springframework.stereotype.Service;

import java.util.Objects;

@Service
public class SparkServiceImpl implements SparkService {

    /**
     * Reads a plain-text file into a Spark {@link Dataset}, one {@link Row} per line.
     *
     * <p>Acquires a {@link SparkSession} via {@code getOrCreate()}, which reuses an
     * already-active session, so repeated calls do not create extra sessions.
     * The session is intentionally not stopped here because the returned Dataset
     * is lazy and remains bound to it.
     *
     * @param path path to the text file to read (local or any Spark-supported URI)
     * @return a Dataset with a single string column {@code value}, one row per line
     * @throws NullPointerException if {@code path} is null
     */
    @Override
    public Dataset<Row> readDataFromFile(String path) {
        Objects.requireNonNull(path, "path must not be null");

        // NOTE(review): master is hard-coded to local[*] and the app name says
        // "JSON" although this reads plain text — both should be externalized
        // to configuration for cluster deployments. Kept as-is to preserve
        // runtime behavior.
        SparkSession spark = SparkSession.builder()
                .appName("ReadJSONFileJavaExample")
                .master("local[*]")
                .getOrCreate();

        // Reads the file as text; adjust the path/format to the actual use case.
        Dataset<Row> data = spark.read().text(path);

        // count() is a Spark action and triggers a full scan of the file;
        // kept from the original as a simple row-count diagnostic.
        long count = data.count();
        System.out.println("数据行数: " + count);

        return data;
    }
}