'''
import torch
import tensorflow as tf
class_num = 3
batch_size = 4
a=torch.tensor([[1, 0, 1],[1,0,0],[0,1,0],[1,1,1]])
print(a)
# a=a[:,2]
# print(a)
# c=tf.one_hot(a, depth=2, dtype=tf.int32)
# print(c)
loss_num_label = []
logits_num_label = []
for i in range(3):
    logits = torch.Tensor(4,2)
    print('logits:',logits)
    logits_num_label.append(logits)
    one_hot_labels = tf.one_hot(a[:,i], depth=2, dtype=tf.int32)
    per_example_loss = tf.nn.softmax_cross_entropy_with_logits(labels=one_hot_labels,logits=logits)
    print('per_example_loss:',per_example_loss)
    loss_num_label.append(tf.reduce_mean(per_example_loss))
print('loss_num_label:',loss_num_label)
s=tf.stack(logits_num_label, 0)
print('s:',s)
logits_num_label = tf.transpose(s,[1,0,2])
print('logits_num_label:', logits_num_label)
loss_num_label = tf.stack(loss_num_label, 0)
print('loss_num_label:',loss_num_label)
probabilities = tf.nn.sigmoid(logits_num_label)  # [batch,num_label,2]
print('probabilities:',probabilities)
predictions = tf.argmax(probabilities,2)
print('predictions:', predictions)

'''
# NOTE: the triple-quoted string above is a disabled TensorFlow prototype of the
# multi-label (one binary head per label) loss/prediction pipeline. The active
# script further down reworks the same steps in PyTorch. Since this string is
# the file's first statement it doubles as the module docstring; it is kept
# only as a reference and is never executed.
import tensorflow as tf
import torch
import torch.nn.functional as F
import torch.nn as nn
import random
# NOTE(review): this seeds only Python's built-in `random` module, which this
# script never actually uses. It does NOT make the torch tensors below
# reproducible — that would require torch.manual_seed(...) — confirm intent.
random.seed(2021)

# a=torch.Tensor(4,2)
# b=torch.tensor([1, 0, 1, 1])
# print(a)
# print(b)
# criterion=nn.CrossEntropyLoss()
# # c=F.cross_entropy(a, b)
# c=criterion(a,b)
# print('c:',c)
# logits=tf.one_hot(b, depth=2, dtype=tf.int32)
# print('logits:', logits)
# per_example_loss = tf.nn.softmax_cross_entropy_with_logits(labels=logits,logits=a)
# print('per_example_loss:', per_example_loss)
# print(tf.reduce_mean(per_example_loss))

# PyTorch implementation of the multi-label pipeline: for each of `class_num`
# independent binary labels, compute a 2-way cross-entropy loss over the batch,
# then stack the per-label logits into [batch, num_labels, 2] and take the
# per-label argmax as the prediction.
import torch
import torch.nn as nn

torch.manual_seed(2021)  # seed torch's RNG so the simulated logits are reproducible

class_num = 3   # number of independent binary labels
batch_size = 4

# Ground-truth targets: one 0/1 class per (example, label) -> [batch, num_labels].
a = torch.tensor([[1, 0, 1], [1, 0, 0], [0, 1, 0], [1, 1, 1]])
print(a)

criterion = nn.CrossEntropyLoss()  # default reduction='mean' -> scalar per label
loss_num_label = []    # one scalar (batch-mean) loss per label
logits_num_label = []  # one [batch, 2] logits tensor per label
for i in range(class_num):  # one binary classification head per label
    # Simulated model output for label i: [batch, 2].
    # torch.randn replaces the original torch.Tensor(batch_size, 2), which
    # returns UNINITIALIZED memory (possibly NaN/inf) and would corrupt the demo.
    logits = torch.randn(batch_size, 2)
    print('logits:', logits)
    logits_num_label.append(logits)
    one_hot_labels = a[:, i]  # integer class targets for label i: [batch]
    per_example_loss = criterion(logits, one_hot_labels)
    print('per_example_loss:', per_example_loss)
    loss_num_label.append(per_example_loss)
print('loss_num_label:', loss_num_label)

s = torch.stack(logits_num_label, 0)  # [num_labels, batch, 2]
print('s:', s)
logits_num_label = s.transpose(0, 1)  # -> [batch, num_labels, 2]
# NOTE: if the model emitted a flat [batch, num_labels*2] tensor, this stack +
# transpose could instead be a single reshape(batch, num_labels, 2).
print('logits_num_label:', logits_num_label)
loss_num_label = torch.stack(loss_num_label, 0)  # [num_labels]
print('loss_num_label:', loss_num_label)

# Per-class probabilities. Pure torch: the original additionally pushed the
# torch tensor through tf.nn.sigmoid / tf.to_int32(tf.argmax(...)), needlessly
# mixing frameworks (and tf.to_int32 is deprecated compat.v1 API).
probabilities = torch.sigmoid(logits_num_label)  # [batch, num_labels, 2]
print('probabilities:', probabilities)

# Predicted class per (example, label): argmax over the 2-way axis.
predictions = torch.argmax(probabilities, 2).to(torch.int32)  # [batch, num_labels]
print('predictions:', predictions)


# import torch
# import torch.nn as nn
# a=torch.tensor([[[1,2],[5,2],[1,6]],[[4,8],[9,7],[7,8]]])
# print(a,a.shape)
# m=torch.argmax(a,dim=2)
# print(m)
# import tensorflow.compat.v1 as tf
# predictions = tf.to_int32(tf.argmax(a,2))
# print(predictions)





