from matplotlib import pyplot as plt
import numpy as np
import os
from pyspark import SparkContext
from pyspark.mllib.regression import LinearRegressionWithSGD, LabeledPoint

# Configure the worker-side Python interpreter; must be set before the
# SparkContext is created (which happens below via getOrCreate()).
os.environ['PYSPARK_PYTHON'] = "python3"

# (x, y) training samples for a one-feature linear regression.
points = [(0.43, 0.23), (0.67, 0.67), (1.0, 0.47), (1.33, 0.6), (1.23, 1.17), (1.67, 1.27), (1.23, 1.0), (1.83, 1.5), (1.53, 0.93), (1.93, 1.17), (2.23, 1.27), (2.17, 2.07), (2.97, 1.37), (3.5, 1.43), (2.9, 2.47), (2.63, 2.1), (2.17, 2.37), (2.63, 3.4), (3.27, 3.3), (2.97, 2.5), (2.83, 3.1), (2.63, 2.6), (2.2, 1.5), (2.93, 1.7), (2.93, 2.1), (3.3, 2.9), (3.07, 2.97), (3.0, 2.67), (2.4, 1.83), (2.3, 2.0), (2.23, 1.83), (2.43, 2.23), (2.57, 3.03), (3.33, 2.1), (3.73, 2.17), (3.7, 2.83), (3.97, 3.27)]

sc = SparkContext.getOrCreate()

# MLlib's API takes LabeledPoint(label, features): label = y, features = [x].
data = [LabeledPoint(y, [x]) for x, y in points]

# NOTE(review): LinearRegressionWithSGD is deprecated since Spark 2.0;
# pyspark.ml.regression.LinearRegression is the maintained replacement.
model = LinearRegressionWithSGD.train(sc.parallelize(data))
print(model.intercept, model.weights)

# Scatter the raw samples and overlay the fitted regression line.
xs, ys = zip(*points)
plt.scatter(xs, ys)
grid = np.arange(0, 5, 0.1)
# model.weights is a DenseVector of length 1, not a scalar: extract the
# slope explicitly so the plotted expression is a plain ndarray instead of
# relying on incidental DenseVector/ndarray broadcasting.
slope = model.weights[0]
plt.plot(grid, slope * grid + model.intercept, color='r')
plt.show()

# Release driver/executor resources held by the SparkContext.
sc.stop()