import pandas as pd
import numpy as np

# Discretization: reduce the number of distinct values of a continuous attribute.
# The continuous value range is split into a number of intervals, and each value
# is replaced by the symbol/integer of the interval it falls into.
# pd.qcut(): quantile binning — splits the data into n groups with roughly equal
#   counts per group; the interval edges are chosen automatically (only pass q).
# pd.cut(): bin by caller-supplied interval edges — pass the data plus `bins`.
# pd.get_dummies(): build a dummy/indicator (one-hot) matrix from the
#   discretized categorical data.

data = pd.read_csv("stock_day.csv").drop(["ma5", "ma10", "ma20", "v_ma5", "v_ma10", "v_ma20"], axis=1)
# print(data.head())
"""
             open   high  close  ...  price_change  p_change  turnover
2018-02-27  23.53  25.88  24.16  ...          0.63      2.68      2.39
2018-02-26  22.80  23.78  23.53  ...          0.69      3.02      1.53
2018-02-23  22.88  23.37  22.82  ...          0.54      2.42      1.32
2018-02-22  22.25  22.76  22.28  ...          0.36      1.64      0.90
2018-02-14  21.49  21.99  21.92  ...          0.44      2.05      0.58
"""
# 1. Automatic discretization: quantile binning of the first five "high" values.
high_prices = data["high"].head()
auto_binned = pd.qcut(high_prices, q=10)
print(auto_binned)
# Observed output:
#   2018-02-27                  (25.04, 25.88]
#   2018-02-26                  (23.698, 24.2]
#   2018-02-23                 (23.126, 23.37]
#   2018-02-22                (22.606, 22.882]
#   2018-02-14    (21.988999999999997, 22.298]

# With only 5 values spread over 10 quantile bins, each value occupies its
# own interval and the remaining bins are empty:
print(auto_binned.value_counts())
# Observed output:
#   (21.988999999999997, 22.298]    1
#   (22.606, 22.882]                1
#   (23.126, 23.37]                 1
#   (23.698, 24.2]                  1
#   (25.04, 25.88]                  1
#   (22.298, 22.606]                0
#   (22.882, 23.126]                0
#   (23.37, 23.534]                 0
#   (23.534, 23.698]                0
#   (24.2, 25.04]                   0

# 2. Binning with explicit, hand-chosen interval edges.
# These edges span -100%..100% and mark fall/rise bands, matching the daily
# percent-change column ("p_change"). NOTE: the original applied them to
# data["high"] (absolute prices), which put every single row into (7, 100]
# and made the binning meaningless — use the percent-change column instead.
bins = [-100, -7, -5, -3, 0, 3, 5, 7, 100]
p_count = pd.cut(data["p_change"], bins)
# Counts are now distributed across the rise/fall bands rather than all
# collapsing into a single bin.
print(p_count.value_counts())

# 3. One-hot encode the discretized data: one indicator column per interval,
# column names prefixed with "rise".
one_hot = pd.get_dummies(p_count, prefix="rise")
print(one_hot.head())
# Observed output:
#             rise_(-100, -7]  rise_(-7, -5]  ...  rise_(5, 7]  rise_(7, 100]
# 2018-02-27            False          False  ...        False           True
# 2018-02-26            False          False  ...        False           True
# 2018-02-23            False          False  ...        False           True
# 2018-02-22            False          False  ...        False           True
# 2018-02-14            False          False  ...        False           True
