# -*- coding: utf-8 -*-
"""
Precision-Recall (PR) curve demo
Created on Wed Apr 25 09:34:36 2018

@author: Allen
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets

# Build a deliberately skewed binary problem from the digits dataset:
# label 1 for the digit 9, label 0 for every other digit (~10% positives),
# which makes precision/recall more informative than plain accuracy.
digits = datasets.load_digits()
X = digits.data
y = np.zeros_like(digits.target)
y[digits.target == 9] = 1

from sklearn.model_selection import train_test_split
# Fixed random_state keeps the split (and every figure below) reproducible.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=666)

from sklearn.linear_model import LogisticRegression
clf = LogisticRegression()
clf.fit(X_train, y_train)
log_reg = clf

# Raw signed distances to the decision boundary — the quantity we threshold.
decision_scores = log_reg.decision_function(X_test)
# Candidate thresholds: a 0.1-spaced grid spanning the observed score range.
thresholds = np.arange(decision_scores.min(), decision_scores.max(), 0.1)

from sklearn.metrics import precision_score
from sklearn.metrics import recall_score

# Sweep the decision threshold and record precision and recall at each step.
precisions = []
recalls = []
for t in thresholds:
    predicted = (decision_scores >= t).astype("int")
    precisions.append(precision_score(y_test, predicted))
    recalls.append(recall_score(y_test, predicted))

# Precision rises with the threshold while recall falls — the two curves
# move in opposite directions.
plt.plot(thresholds, precisions)
plt.plot(thresholds, recalls)
plt.show()

### Precision-Recall curve (from the hand-computed sweep above)
# The sharp drop visible in this curve marks the best balance point
# between precision and recall.
plt.plot(precisions, recalls)
plt.show()

from sklearn.metrics import precision_recall_curve

# sklearn's built-in computation. Note: precisions and recalls each contain
# one more element than thresholds (the final (precision=1, recall=0) point
# has no associated threshold), so drop the last entry when plotting.
precisions, recalls, thresholds = precision_recall_curve(y_test, decision_scores)

for curve in (precisions, recalls):
    plt.plot(thresholds, curve[:-1])
plt.show()