import fasttext


def base_test(log='base_test',
              train_path='data/cooking_train.txt',
              valid_path='data/cooking_valid.txt',
              **kwargs):
    """
    Train a supervised fastText model, run two sample predictions,
    and evaluate on the validation file.

    Extra keyword arguments are forwarded verbatim to
    fasttext.train_supervised (epoch, lr, loss, ...).
    Returns the trained model.
    """
    banner = '=' * 20
    print(f'{banner}{log}{banner}')
    model = fasttext.train_supervised(train_path, **kwargs)
    # predict() returns (labels, probabilities)
    r1 = model.predict("Which baking dish is best to bake a banana bread ?")
    print(f'r1={r1}')  # (('__label__baking',), array([0.06315484]))
    r2 = model.predict("Why not put knives in the dishwasher?")
    print(f'r2={r2}')  # (('__label__food-safety',), array([0.06400231]))
    # test() returns (number of samples, precision, recall)
    test_result = model.test(valid_path)
    print(f'test_result={test_result}')
    print('=' * 50)
    return model


def preprocess_data():
    """
    Same baseline run, but against the preprocessed
    (lowercased/tokenized) training and validation files.
    """
    base_test(log='preprocess_data',
              train_path='data/cooking.pre.train.txt',
              valid_path='data/cooking.pre.valid.txt')


def adapt_epoch():
    """
    Increase the number of training epochs (default is 5).
    """
    base_test(log='adapt_epoch',
              train_path='data/cooking.pre.train.txt',
              valid_path='data/cooking.pre.valid.txt',
              epoch=25)


def adapt_lr():
    """
    Raise the learning rate (default is 0.1) on top of more epochs.
    """
    # Fixed log label: was the typo 'atapt_lr'.
    base_test(log='adapt_lr', train_path='data/cooking.pre.train.txt', valid_path='data/cooking.pre.valid.txt',
              epoch=25, lr=1.0)


def adapt_word_n_grams():
    """
    Add word bigram features on top of the tuned epoch/lr settings.
    """
    # BUG FIX: the fastText Python API spells this parameter 'wordNgrams'
    # (camelCase); 'word_n_grams' is rejected by train_supervised.
    base_test(log='adapt_word_n_grams', train_path='data/cooking.pre.train.txt',
              valid_path='data/cooking.pre.valid.txt',
              epoch=25, lr=1.0, wordNgrams=2)


def adapt_loss():
    """
    Switch the loss to hierarchical softmax ('hs') for faster training.
    """
    # BUG FIX: parameter name is 'wordNgrams' in the fastText Python API;
    # 'word_n_grams' raises at call time.
    base_test(log='adapt_loss', train_path='data/cooking.pre.train.txt', valid_path='data/cooking.pre.valid.txt',
              epoch=25, lr=1.0, wordNgrams=2, loss='hs')


def autotune():
    """
    Let fastText search hyperparameters automatically against the
    validation file, with a 600-second time budget.
    """
    base_test(log='autotune',
              train_path='data/cooking.pre.train.txt',
              valid_path='data/cooking.pre.valid.txt',
              autotuneValidationFile='data/cooking.pre.valid.txt',
              autotuneDuration=600)


def loss_use_ova():
    """
    Train with one-vs-all ('ova') loss for independent multi-label
    predictions, then request every label above a 0.5 threshold.
    """
    model = fasttext.train_supervised(input="data/cooking.pre.train.txt", lr=0.2, epoch=25, wordNgrams=2,
                                      loss='ova')
    # k=-1 means "all labels"; threshold keeps only confident ones.
    r = model.predict("Which baking dish is best to bake a banana bread ?", k=-1, threshold=0.5)
    print(f'loss_use_ova r={r}')
    # (('__label__baking', '__label__bread', '__label__equipment'), array([1.000, 0.996, 0.813]))


def save_and_load_model():
    """
    Train the baseline model, persist it to disk, and reload it to
    verify the save/load round trip.
    """
    from pathlib import Path

    model = base_test()
    # Robustness fix: save_model fails if the target directory is missing.
    Path("model").mkdir(parents=True, exist_ok=True)
    model.save_model("model/fasttext.bin")
    model = fasttext.load_model("model/fasttext.bin")
    print(f'save_and_load_model model={model}')


if __name__ == '__main__':
    # Uncomment a step to reproduce it; annotations show
    # (samples, precision, recall) observed on the validation set.
    # base_test()             # (3000, 0.05, 0.022)
    # preprocess_data()       # (3000, 0.081, 0.035)
    # adapt_epoch()           # (3000, 0.26, 0.11)
    # adapt_lr()              # (3000, 0.61, 0.26)
    # adapt_word_n_grams()    # (3000, 0.59, 0.25)
    # adapt_loss()            # (3000, 0.60, 0.26)
    # autotune()              # (3000, 0.64, 0.28)
    # loss_use_ova()
    save_and_load_model()
