from pyspark.sql import SparkSession
from pyspark.sql.functions import col, dayofweek, lit, sum

# Spark dayofweek() codes (1 = Sunday ... 7 = Saturday) for the days we invest on.
_DAY_LABELS = {2: "Monday", 6: "Friday"}

INVESTMENT_PER_PERIOD = 100  # fixed amount invested on each matching day
PURCHASE_FEE_RATE = 0.015    # 1.5% front-load fee deducted from each purchase


def _weekday_summary(fund_df, day_of_week, final_price):
    """Simulate investing a fixed amount on every occurrence of one weekday.

    Args:
        fund_df: rows for a single fund; must contain `Date`, `price`,
            and `accumulativePrice` columns.
        day_of_week: Spark ``dayofweek`` code (1 = Sunday ... 7 = Saturday).
        final_price: the fund's most recent ``accumulativePrice``, used to
            value the accumulated units.

    Returns:
        A one-row DataFrame with totalInvestment, totalUnits, totalEarn and
        investmentRate (percent). NOTE(review): if no rows match the weekday,
        the aggregates (and the rate) are null rather than zero — same as the
        original behavior.
    """
    invested = (
        fund_df.filter(dayofweek(col("Date")) == day_of_week)
        .withColumn("investment", lit(INVESTMENT_PER_PERIOD))
        # Units bought after deducting the purchase fee from the cash invested.
        .withColumn(
            "unitsPurchased",
            (col("investment") * (1 - PURCHASE_FEE_RATE)) / col("price"),
        )
        # Gain per purchase: units held * growth of the accumulative price
        # between the purchase date and the final valuation.
        .withColumn(
            "earn",
            col("unitsPurchased") * (lit(final_price) - col("accumulativePrice")),
        )
    )
    summary = invested.groupBy().agg(
        sum("investment").alias("totalInvestment"),
        sum("unitsPurchased").alias("totalUnits"),
        sum("earn").alias("totalEarn"),
    )
    return summary.withColumn(
        "investmentRate", col("totalEarn") / col("totalInvestment") * 100
    )


if __name__ == "__main__":
    spark = SparkSession \
        .builder \
        .master("local[3]") \
        .appName("FixedInvestment") \
        .getOrCreate()

    # Load every fund's price history from the fund/parquet/ folder.
    parquetDF = spark.read \
        .format("parquet") \
        .load("data/fund/parquet/*.parquet")

    parquetDF.show(5)
    print(parquetDF.schema.json())

    # collect() of the distinct fund IDs is fine here: the number of funds
    # is assumed small relative to the row count.
    fund_ids = [row["fundid"] for row in parquetDF.select("fundid").distinct().collect()]

    for fundid in fund_ids:
        print(f"Processing fundid: {fundid}")

        # Restrict to the current fund's rows.
        fundDF = parquetDF.filter(col("fundid") == fundid)

        # Latest accumulative price — loop-invariant per fund, so computed
        # once and shared by both weekday simulations (the original
        # recomputed it inside the Monday branch only).
        finalPrice = (
            fundDF.orderBy(col("Date").desc())
            .select("accumulativePrice")
            .first()["accumulativePrice"]
        )

        for day_code, label in _DAY_LABELS.items():
            print(f"{label} Results for fundid: {fundid}")
            _weekday_summary(fundDF, day_code, finalPrice).show()

    spark.stop()
