# # encoding:utf-8
# import numpy as np
# import tensorflow as tf
#
#
# def sigmoid(x):
#     return 1.0 / (1 + np.exp(-x))
#
#
# labels = np.array([[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]])
# logits = np.array([[-800, -2., -3], [-10., -700., -13.], [-1., -2., 0]])
# y_pred = sigmoid(logits)
# prob_error1 = -labels * np.log(y_pred) - (1 - labels) * np.log(1 - y_pred)
# print("未优化的代码：", prob_error1)
# prob_error2 = np.greater_equal(logits, 0) - logits * labels + np.log(1 + np.exp(-np.abs(logits)))
# prob_error3 = logits - logits * labels + np.log(1 + np.exp(-logits))
# print("优化的结果：", prob_error2)
# print("未优化的结果：", prob_error3)
#
# with tf.Session() as sess:
#     print("tf优化的结果：")
#     print(sess.run(tf.nn.sigmoid_cross_entropy_with_logits(labels=labels, logits=logits)))

# encoding:utf-8
import tensorflow as tf
import numpy as np

def sigmoid(x):
    """Element-wise logistic (sigmoid) function: 1 / (1 + e^(-x)).

    Works on scalars and NumPy arrays alike (broadcasts element-wise).
    """
    z = np.exp(-x)
    return 1.0 / (1.0 + z)

sess =tf.Session()

# 5 samples, 3-class problem; a sample may carry several labels at once
# (multi-label classification), hence sigmoid rather than softmax.
y = np.array([[1,0,0],[0,1,0],[0,0,1],[1,1,0],[0,1,0]])
logits = np.array([[-800,3,2],[3,10,1],[1,2,5.1],[4,6,1],[3,6,1]])

# Clamp negatives to 0 with ReLU, then pull the concrete ndarray back out of
# the graph so the NumPy formula below can use it directly.
logits = tf.nn.relu(logits)
logits = sess.run(logits)

y_pred = sigmoid(logits)
E1 = -y*np.log(y_pred)-(1-y)*np.log(1-y_pred)
print(E1)    # cross-entropy computed directly from the textbook formula

# print(np.greater_equal(logits,0))
# prob_error2=np.greater_equal(logits,0)-logits*y+np.log(1+np.exp(-np.abs(logits)))
# print(prob_error2)
y = np.array(y).astype(np.float64)   # labels must be float64 to match the float logits
E2 = sess.run(tf.nn.sigmoid_cross_entropy_with_logits(labels=y,logits=logits))
print(E2)                           # E1 and E2 print the same values

# a0 = [(60,495),(60,497),(65,500)]
# b0 = [[1, 0, 0, 1],[1,1,1,1],[0,0,0,1]]
# c=[2,3,4,9,0,116,67,9]
# a = tf.convert_to_tensor(a0,name='a',dtype=tf.int32)
# b = tf.convert_to_tensor(b0,name='b',dtype=tf.float32)
# aa = tf.cast(a, dtype=tf.float32)
#
# neg_dis = []
# neg_dis_edge = []
# def fn_edge_1():
#     neg_dis_edge.append(6)
#     return tf.constant(1)
# def fn_edge_2():
#     return tf.constant(0)
# def fn_dis_1(a):
#     neg_dis.append(a)
#     return tf.constant(1)
# def fn_dis_2(a):
#     return tf.constant(0)
# n = tf.constant(0)
# x = a
# def for_body(n,x):
#     # n_edge = tf.unstack(x[n])
#     # n_dis = tf.constant(2)
#     # condition = tf.greater(n_dis, tf.constant(1))
#     # tf.cond( condition, lambda: fn_edge_1(), lambda: fn_edge_2())
#     # tf.cond( condition, lambda: fn_dis_1(n_dis), lambda: fn_dis_2(n_dis))
#     n = n+1
#     return n,x
#
# def for_condition(n,x):
#     return n<x.get_shape().as_list()[0]
#
# n,x = tf.while_loop(for_condition, for_body, [n,x])
# c0=a.get_shape().as_list()[0]
# d0=neg_dis
# dd = sess.run(n)
print("xx")
# d_2d_1 = tf.reduce_sum(b0, axis=0)
# c0 = tf.nn.top_k(c,2)
#
# c1 = sess.run(c0)
# dd0= sess.run(d_2d_1)
# dd = sess.run(aa)
#
# def get_positive_dis(input):
#     edge = input[0]
#     d2g = input[1]
#
#     dis_vector = tf.zeros((32,))
#     # list_edge = list(edge)
#     list_edge = tf.unstack(edge)
#     dis = tf.linalg.norm(d2g)
#     dis_vector = d2g
#     return dis#,dis_vector
#
# # def get_negative_sample(a,b):
# elems = a,b
# total = tf.map_fn(get_positive_dis,elems,dtype='float')
#
# q0= sess.run(total)
#
#
#
# dis_vector = tf.zeros((32,))
# # list_edge = list(edge)
# list_edge = tf.unstack(a[0])
# # list_edge = a[0].get_shape().as_list()
# dis = tf.linalg.norm(b[0]-b[1])
# dis_vector = b[0]-b[1]
# q1= sess.run(list_edge)
# q2= sess.run(dis)
# q3= sess.run(dis_vector)
#
# A = [[1, 2, 3], [4, 2, 3]]
# A = (1, 2)
# B = tf.unstack(A, axis=0)
#
# # ccc = tf.map_fn(,A,dtype='float')
# with tf.Session() as sess:
#    zz= sess.run(B)
#    aaa=sess.run(B[0])
#    aaaa = sess.run(B[1])
# print("zz")



# # import numpy as np
# # import tensorflow as tf
# #
# # # Required Python Package
# # import numpy as np
# #
# # def sigmoid(inputs):
# #     """
# #     Calculate the sigmoid for the give inputs (array)
# #     :param inputs:
# #     :return:
# #     """
# #     sigmoid_scores = [1 / float(1 + np.exp(- x)) for x in inputs]
# #     return sigmoid_scores
# #
# #
# # preds = [[2.0, 3.0, 5.0, 6.0],[1.0, 0.0, 2.0, 1.0]]
# # labels = [[1, 1, 0, 0],[1, 0, 0, 1]]
# # print(np.mean(labels))
# #
# # r_preds = tf.where(tf.greater_equal(preds, 0.5), tf.ones_like(labels), tf.zeros_like(labels))
# # correct_prediction = tf.equal(r_preds, labels)
# # accuracy_all = tf.cast(correct_prediction, tf.float32)
# # r = tf.reduce_mean(accuracy_all)
# #
# # s = tf.Session()
# # a1=s.run(r_preds)
# # a2=s.run(correct_prediction)
# # a3=s.run(accuracy_all)
# # a4=s.run(r)
# #
# # print("cc")
# # dic = {'a':31, 'bc':5, 'c':3, 'asd':4, 'aa':74, 'd':0}
# # dict= sorted(dic.items(), key=lambda d:d[0])
# # print dict
# #
# #
# # dic = {'a':31, 'bc':5, 'c':3, 'asd':4, 'aa':74, 'd':0}
# # # dict= sorted(dic.items(), key=lambda d:d[1])
# # print dict
# # print len(dict)
# # print dict[0][0]
#
# # test = np.array([1,2,3,4])
# # test2 = np.array([0,3,5,1])
# # print(np.sum(np.multiply(test,test2)))
# #
# # a = np.array([1, 2, 3])
# # b = np.array([1, 2, 3])
# # print((a == b).all())


# test_D012516 = [0.5031111, 0.5029171, 0.50445646, 0.507938, 0.51092416, 0.50494003,
#  0.5031706, 0.49521896, 0.5052576, 0.50412285, 0.46756163, 0.5054976,
#  0.506205, 0.50150347, 0.50319284, 0.5041572, 0.5017739, 0.49319336,
#  0.50723004, 0.5072962,  0.49755165, 0.5020647,  0.45738146, 0.49544322,
#  0.50454295, 0.5003281,  0.50078166, 0.501113,   0.50106066, 0.5018299,
#  0.501465,   0.50114214, 0.50198674, 0.50164783, 0.5061806,  0.4776836,
#  0.5010958,  0.49354452, 0.5073023,  0.48852724, 0.48852897, 0.47603345,
#  0.5064201,  0.50455296, 0.5034467,  0.5034916,  0.478645,   0.5068349,
#  0.49129522, 0.4988774,  0.5059225,  0.5091894,  0.50435805, 0.50527334,
#  0.5063351,  0.48802933, 0.49287906, 0.5069111,  0.49733752, 0.49914667,
#  0.49771222, 0.50294274, 0.5017159,  0.5047011,  0.5046432,  0.48794526,
#  0.49789968, 0.5014317,  0.4982821,  0.5001536,  0.50502425, 0.48410118,
#  0.48015797, 0.50165695, 0.4976049,  0.50130427, 0.5087849,  0.51061803,
#  0.49364135, 0.50152886, 0.5057862,  0.5032401,  0.4998189,  0.46880695,
#  0.467706,   0.5063602,  0.4963524,  0.49569425, 0.50411636, 0.50880057,
# 0.49975488,  0.50120074, 0.4943902,  0.4978705,  0.50087065,  0.50453424,
# 0.5008875,  0.4863877,  0.5012199,   0.47263354, 0.5006781,  0.50523007,
# 0.49838087,  0.48234963, 0.4625454,  0.50621533, 0.48631203,  0.459972,
# 0.4645163,  0.46397364,  0.5044225,  0.49583644,  0.506006,  0.49352297,
# 0.48704585,  0.50305074,  0.5026436, 0.50400007,  0.5034974, 0.5010273,
# 0.505049,   0.5032385,  0.5032875,   0.5032792,  0.5011756,  0.49505407,
# 0.50488764, 0.5048254,  0.49469218, 0.5039568,  0.50250506, 0.48670894,
# 0.48882312, 0.4994525,  0.5036423,  0.49469686, 0.4953245, 0.5040354,
# 0.5040212,  0.50466686, 0.5037774,  0.4977769,  0.49446303, 0.5027876,
# 0.49240625, 0.5007722,  0.49831215, 0.50307727, 0.49610022, 0.48212722,
# 0.5066172,  0.49173656, 0.5018634,  0.5057867,  0.49452737, 0.48946178,
# 0.5004845,  0.4928791,  0.5064993,  0.5036163, 0.5068992,   0.455172,
# 0.49311993, 0.50527805, 0.5006758,  0.4969702,  0.49125203, 0.49169275,
# 0.5008801,  0.5014773,  0.47977278,  0.5075325, 0.4981991,  0.497122,
# 0.50331694, 0.50397265, 0.5045023,  0.50014645, 0.50412846,  0.49064827,
# 0.49546117, 0.49782512, 0.5036595,  0.5002072,  0.50366277,  0.49488673,
# 0.49869898, 0.50299406, 0.49496043, 0.48530188, 0.50293416, 0.49803185,
# 0.4953205,  0.50209856, 0.50276744, 0.5035115,  0.5055048,  0.50706536,
# 0.50222987, 0.49028346, 0.5037042,  0.49984854, 0.5020285,  0.5025164,
# 0.49967968, 0.5110892,  0.48990026, 0.5027675,  0.49141252, 0.5059336,
# 0.5062647,  0.49534184, 0.503694,   0.50891334, 0.5016205,  0.50517523,
# 0.5072359,  0.48736116, 0.4864671,  0.48808002,  0.5009314, 0.49975392,
# 0.5059935,  0.50538224, 0.5063824,  0.48278013,  0.50118154,  0.49561825,
# 0.49332604, 0.50558573, 0.484104,   0.502745,   0.50302637,   0.49286112,
# 0.5020182,   0.489887,   0.50317776, 0.5001512, 0.48739934,   0.4559324,
# 0.5031595,  0.5028165,  0.5017987,  0.502609,  0.5112498,  0.49455214,
# 0.5031068,  0.48171195, 0.499523,  0.49368575, 0.5006978,  0.5051082,
# 0.50384295,  0.46613616, 0.50779766, 0.5008388,  0.4864724,  0.49506062,
# 0.48808193,  0.50546336, 0.4979787,  0.502298,  0.50381017,  0.5048235,
# 0.49573416,  0.5019959,  0.49022537, 0.5023204, 0.49260193,  0.49319434,
# 0.5043156,  0.4700611,  0.49612734,  0.48959896,  0.5045897, 0.48883867,
# 0.50747603, 0.49460134, 0.49711153,  0.493685,   0.5049284,  0.49300674,
# 0.48622748, 0.49405003, 0.49128786,  0.49194956, 0.49792612,  0.50133806,
# 0.50049853, 0.5010565,  0.49772614,  0.48758033, 0.5068349,   0.5052804,
# 0.5069569,  0.49935105, 0.5026514,   0.4999286,  0.49107802,  0.49915838,
# 0.49045867, 0.4901934,  0.49817118,  0.48825485, 0.46548378, 0.49999404,
# 0.50111985, 0.50017256, 0.49448094,  0.48066127, 0.49495074, 0.48833588,
# 0.50833774, 0.4940318,  0.4771501,  0.5023369, 0.50054747,  0.50373006,
# 0.5002096, 0.5011456,  0.49614954,  0.4976908,   0.4960933, 0.4989895,
# 0.4959945, 0.5035365, 0.4987446,   0.50821644,  0.50186324, 0.5119809,
# 0.49240324, 0.50117505,  0.5095214, 0.5069886,  0.49235293, 0.5056478,
# 0.48051113, 0.48982966, 0.5087101, 0.4968946,  0.503426,  0.50185853,
# 0.49679956, 0.49851108, 0.48428667, 0.49596286, 0.50738215, 0.50023836,
# 0.4951005, 0.5011748,  0.50049245,  0.50049853, 0.500693, 0.5061495,
# 0.5023285, 0.5033533,  0.50487703,  0.5044557,  0.49563888, 0.49826574,
# 0.50272065, 0.5027206, 0.50364995,  0.49409655, 0.5007849,  0.4992647,
# 0.50628424, 0.4984002, 0.50280946,  0.5005984, 0.49383536,  0.493538,
# 0.5029307, 0.50534296, 0.49892327,  0.5035424,  0.5036522,  0.51266605,
# 0.5041495, 0.48282492, 0.50302917, 0.50359327, 0.5066819, 0.45447642,
# 0.49808848, 0.4951104, 0.47762242, 0.5019187,  0.5075149,  0.4950382,
# 0.504434, 0.49908006,  0.5070942, 0.50653684, 0.48246711, 0.49946082,
# 0.49007276, 0.5049573, 0.49507475, 0.4967435,  0.48519355, 0.50444025,
# 0.48558336,  0.44874367,  0.501842, 0.4744668, 0.49879286, 0.4967848,
# 0.49376935, 0.49836177, 0.49171415, 0.44883916, 0.4864391, 0.50064373,
# 0.5038588,  0.4950754, 0.5042396, 0.50601304,  0.5037839,  0.4884956,
# 0.4831413, 0.50386137,  0.5064778,  0.50473887, 0.50836116, 0.50502676,
# 0.49635082, 0.50433356, 0.49658322, 0.48616484, 0.5015992, 0.48982614,
# 0.5027692, 0.49329066, 0.5053796, 0.50107914, 0.5067682, 0.48535264,
# 0.5026146, 0.49213096, 0.49809587, 0.48037952, 0.50175965, 0.50347143,
# 0.5028743, 0.49697924, 0.49505645, 0.5041914, 0.50639254, 0.5080645,
# 0.49058917, 0.5032401, 0.50490427, 0.50440675, 0.49100736, 0.49132612,
# 0.48686585, 0.50084776, 0.5015457, 0.48272333, 0.50819194, 0.496627,
# 0.49778825, 0.50212735, 0.49319988, 0.49681145, 0.49961218, 0.5027947,
# 0.5099531, 0.48164046, 0.50267464,  0.49723125, 0.50386965, 0.49953362,
# 0.49478123, 0.4859217, 0.46471047, 0.49687815,  0.5044782,  0.5040758,
# 0.5017781, 0.50371313, 0.5045609,  0.49722552, 0.48986727,  0.499866,
# 0.5089473, 0.5064468, 0.5031844,   0.50229955,  0.508179, 0.50270367,
# 0.50790477,  0.49538037, 0.50122523, 0.5050566, 0.50465244, 0.4803805,
# 0.5000696, 0.5058369, 0.48943248,  0.49151197,  0.5008823,  0.5036449,
# 0.5060217, 0.503986,  0.4815639,  0.46709657,   0.49864784, 0.4954916,
# 0.49594703, 0.49547893, 0.4920895, 0.4845608,   0.49548188, 0.4870656,
# 0.48362166, 0.49980852, 0.5080358, 0.50457484, 0.50787586, 0.49195808,
# 0.5083885, 0.50662404, 0.4989911, 0.49651992,  0.49928144, 0.49270144,
# 0.4993337, 0.5018806, 0.5101673,  0.5044254,  0.500261,  0.5016978,
# 0.50382113, 0.4634246, 0.50483745, 0.4815072,  0.49216738, 0.4865699,
# 0.5015991, 0.50153273, 0.49110723, 0.4647371, 0.45982188,  0.4882868,
# 0.5030091, 0.5017812, 0.50386035, 0.49650043, 0.46611673,  0.5069956,
# 0.4920813, 0.4870861, 0.49797258, 0.50395656, 0.49820086, 0.49918452,
# 0.49557197, 0.4854902, 0.5096201, 0.4941257, 0.50793356, 0.49771038,
# 0.5001148, 0.473919, 0.5025785,  0.48684466, 0.5018632]
# test_D011085 = [0.5023884,0.49955466,0.500794,0.4996387,0.49591666,0.5074945,0.50730014,0.5140812,0.50374967,
# 0.5037076,0.5036313,0.5096558,0.5094537,0.49442697,0.4988452,0.5033693,0.49791738,0.5036537,0.50042045,0.50033915,0.50414777,
# 0.5096398,0.49973786,0.49799627,0.49954194,0.50102884,0.49917033,0.50077754,0.5007969,0.5000054,0.4994951,0.5075101,0.50028044,0.5010897,
# 0.5086009,0.50565904,0.50331974,0.5063291,0.50599253,0.50217247,0.4987577,0.49912086,0.5084221,0.5077022,0.5085927,0.5026809,0.5007809,
# 0.49572498,0.4988422,0.49835643,0.5099958,0.50365245,0.50469744,0.50224143,0.50278205,0.50020736,0.50110334,0.4994975,0.49958712,0.4996298,
# 0.49968442,0.49944466,0.5006403,0.5033722,0.50320446,0.49701962,0.49951237,0.49783078,0.49983194,0.49931687,0.49922192,0.50017697,0.5012383,
# 0.4986668,0.4998419,0.5004125,0.51059616,0.5121913,0.4982996,0.49652496,0.5008995,0.49980265,0.49962416,0.4984692,0.4338595,0.50014514,0.49657482,
# 0.49891278,0.500238,0.5141963,0.5062408,0.50356525,0.49916142,0.49802122,0.49970827,0.50148034,0.49921963,0.49988574,0.4992328,0.43888173,0.44408438,
# 0.5155659,0.5054403,0.506773,0.50083244,0.50036097,0.49815983,0.49718717,0.49863482,0.49926847,0.49917567,0.43954545,0.49942452,0.4997374,0.5039947,0.5006452,0.5051264,0.5055581,0.5081108,0.49957073,0.5058898,0.498838,0.49857622,0.4984987,0.4989992,0.49794292,0.49973446,
# 0.50059134,0.50339615,0.5062235,0.5070735,0.49735016,0.50433314,0.49914888,0.49930492,0.4969554,0.49993917,0.5119423,0.5093058,0.5119448,0.5090374,
# 0.49939802,0.44020063,0.5064894,0.49947184,0.49727052,0.49755993,0.49938333,0.49918923,0.49893096,0.51334,0.4995603,0.5001274,0.49967232,0.49949342,
# 0.5025156,
# 0.5004247,
# 0.49964187,
# 0.5000782,
# 0.5001704,
# 0.49880433,
# 0.49796155,
# 0.49850655,
# 0.49906048,
# 0.5010657,
# 0.49798086,
# 0.49950817,
# 0.49944445,
# 0.49940455,
# 0.50240535,
# 0.50010043,
# 0.50183356,
# 0.5013617,
# 0.5001789,
# 0.49927,
# 0.50064677,
# 0.50243956,
# 0.4993678,
# 0.50095856,
# 0.49998027,
# 0.49869794,
# 0.4982142,
# 0.49976146,
# 0.49880147,
# 0.5004505,
# 0.5001752,
# 0.5008547,
# 0.50070876,
# 0.5074989,
# 0.50553334,
# 0.5022193,
# 0.50181997,
# 0.5039975,
# 0.5018805,
# 0.5001572,
# 0.49885792,
# 0.5063504,
# 0.5046893,
# 0.50446504,
# 0.49893174,
# 0.49961254,
# 0.49901056,
# 0.5056783,
# 0.50092125,
# 0.49812475,
# 0.501695,
# 0.49910817,
# 0.50568104,
# 0.44566196,
# 0.50967,
# 0.5011485,
# 0.49935094,
# 0.5006734,
# 0.5052476,
# 0.5015484,
# 0.50235575,
# 0.5088334,
# 0.4971951,
# 0.49891248,
# 0.49715018,
# 0.50756913,
# 0.5052487,
# 0.51003647,
# 0.49800646,
# 0.5076213,
# 0.49936122,
# 0.49953604,
# 0.49984077,
# 0.49142966,
# 0.5010712,
# 0.5002814,
# 0.49947694,
# 0.5026916,
# 0.5021081,
# 0.5071942,
# 0.5008515,
# 0.5014904,
# 0.49977154,
# 0.49925995,
# 0.4671238,
# 0.4997543,
# 0.49823692,
# 0.4994226,
# 0.4987813,
# 0.5002508,
# 0.49961945,
# 0.49881375,
# 0.49874115,
# 0.49957424,
# 0.49943244,
# 0.49922806,
# 0.51068276,
# 0.5014276,
# 0.5018014,
# 0.5225715,
# 0.5020035,
# 0.4994966,
# 0.5002598,
# 0.49913946,
# 0.50462246,
# 0.502869,
# 0.5043098,
# 0.50931406,
# 0.50103045,
# 0.49917382,
# 0.49892426,
# 0.49936548,
# 0.49913418,
# 0.4982287,
# 0.49945408,
# 0.50102586,
# 0.4979567,
# 0.5001998,
# 0.49995896,
# 0.50080884,
# 0.49910495,
# 0.5000374,
# 0.50009453,
# 0.49882188,
# 0.49930766,
# 0.49926868,
# 0.49902958,
# 0.5014624,
# 0.5004493,
# 0.49990758,
# 0.49692297,
# 0.49774036,
# 0.49873605,
# 0.50483334,
# 0.50462085,
# 0.4405088,
# 0.4990024,
# 0.49572498,
# 0.5007226,
# 0.44664955,
# 0.50213516,
# 0.5024898,
# 0.49818408,
# 0.4989773,
# 0.50112724,
# 0.4973279,
# 0.50140065,
# 0.4986913,
# 0.50081533,
# 0.5035349,
# 0.5033036,
# 0.5020358,
# 0.5006469,
# 0.5056423,
# 0.5001438,
# 0.50192136,
# 0.4997527,
# 0.5037824,
# 0.4980689,
# 0.50255406,
# 0.49697497,
# 0.49971065,
# 0.49854097,
# 0.50018984,
# 0.5024567,
# 0.4996395,
# 0.49846968,
# 0.49950096,
# 0.5018534,
# 0.5055954,
# 0.5028123,
# 0.5048741,
# 0.51354563,
# 0.5007818,
# 0.5057923,
# 0.5020104,
# 0.5063798,
# 0.510471,
# 0.52722055,
# 0.4990061,
# 0.49906358,
# 0.50056636,
# 0.49922162,
# 0.49966383,
# 0.49991396,
# 0.5023487,
# 0.4983312,
# 0.49812537,
# 0.5006032,
# 0.499113,
# 0.50021005,
# 0.49975342,
# 0.49838337,
# 0.49958983,
# 0.5004694,
# 0.500661,
# 0.5011359,
# 0.5007311,
# 0.49712595,
# 0.49860603,
# 0.4979228,
# 0.50611067,
# 0.5074217,
# 0.50576305,
# 0.49869058,
# 0.49950856,
# 0.5071954,
# 0.5059938,
# 0.4997296,
# 0.49985665,
# 0.49957222,
# 0.49809816,
# 0.49911654,
# 0.5000432,
# 0.49873224,
# 0.50306195,
# 0.49827623,
# 0.50274825,
# 0.5000412,
# 0.4978442,
# 0.49914336,
# 0.49912927,
# 0.5063227,
# 0.5088952,
# 0.49900472,
# 0.49737746,
# 0.4981033,
# 0.5058529,
# 0.49887234,
# 0.4989371,
# 0.50097257,
# 0.49888957,
# 0.502365,
# 0.5244847,
# 0.46958876,
# 0.5007238,
# 0.49858516,
# 0.50045615,
# 0.49874276,
# 0.49870172,
# 0.5102079,
# 0.5017821,
# 0.49808946,
# 0.5086211,
# 0.50074244,
# 0.49756986,
# 0.4978548,
# 0.49805608,
# 0.49826813,
# 0.4986729,
# 0.47196186,
# 0.5002539,
# 0.49814284,
# 0.49786425,
# 0.5011541,
# 0.49839887,
# 0.49854454,
# 0.49753886,
# 0.4994552,
# 0.50583297,
# 0.49704888,
# 0.5088574,
# 0.51081294,
# 0.5035804,
# 0.507541,
# 0.50378233,
# 0.5162436,
# 0.5121616,
# 0.51228243,
# 0.50349534,
# 0.49982074,
# 0.49874267,
# 0.50406456,
# 0.5049228,
# 0.49852628,
# 0.5000568,
# 0.4992895,
# 0.49813688,
# 0.5000553,
# 0.49759185,
# 0.4974286,
# 0.5000734,
# 0.4349976,
# 0.5091077,
# 0.44009507,
# 0.4989968,
# 0.49876532,
# 0.4979784,
# 0.49860564,
# 0.50842947,
# 0.5019734,
# 0.5000972,
# 0.50252485,
# 0.5041307,
# 0.5067594,
# 0.44105375,
# 0.49816534,
# 0.5208417,
# 0.51038194,
# 0.50314164,
# 0.49890915,
# 0.4989869,
# 0.49931765,
# 0.49895027,
# 0.5127475,
# 0.5084174,
# 0.5027811,
# 0.5003534,
# 0.5112429,
# 0.5085655,
# 0.5037253,
# 0.4978378,
# 0.4987308,
# 0.5014664,
# 0.50175166,
# 0.49988547,
# 0.50307846,
# 0.4995654,
# 0.49224508,
# 0.4999995,
# 0.49794012,
# 0.4989284,
# 0.49794152,
# 0.5016823,
# 0.49923003,
# 0.49992657,
# 0.43677133,
# 0.50181264,
# 0.50107867,
# 0.49722588,
# 0.5008425,
# 0.5006439,
# 0.49935287,
# 0.5001009,
# 0.4995529,
# 0.5088239,
# 0.507921,
# 0.49976435,
# 0.49935845,
# 0.4990551,
# 0.49939847,
# 0.49991387,
# 0.49950346,
# 0.5000382,
# 0.49958768,
# 0.4991819,
# 0.5010347,
# 0.49891374,
# 0.50112414,
# 0.49772677,
# 0.4991332,
# 0.5007003,
# 0.47326058,
# 0.5000438,
# 0.49963197,
# 0.50300366,
# 0.48112354,
# 0.5007663,
# 0.49826646,
# 0.500029,
# 0.49919325,
# 0.5065638,
# 0.49994174,
# 0.5052555,
# 0.5054707,
# 0.5062112,
# 0.5045799,
# 0.5045974,
# 0.50360566,
# 0.49976918,
# 0.50124633,
# 0.5019178,
# 0.5047818,
# 0.5020064,
# 0.4993143,
# 0.5099892,
# 0.50478077,
# 0.50244236,
# 0.44060352,
# 0.49847412,
# 0.49890673,
# 0.49842742,
# 0.50042486,
# 0.5003852,
# 0.4972309,
# 0.5031857,
# 0.50325084,
# 0.5005659,
# 0.49902555,
# 0.5012734,
# 0.498971,
# 0.4995047,
# 0.50891066,
# 0.5080699,
# 0.50248337,
# 0.50786954,
# 0.5000864,
# 0.49982992,
# 0.50022316,
# 0.5001804,
# 0.5001679,
# 0.49802947,
# 0.49776694,
# 0.49913284,
# 0.4994317,
# 0.49998295,
# 0.49936423,
# 0.49911612,
# 0.49913958,
# 0.49832055,
# 0.49406677,
# 0.49864322,
# 0.47434312,
# 0.49749133]

test_D016403=[0.50677305,
0.5129665,
0.51159424,
0.50385565,
0.51117504,
0.5054814,
0.5105343,
0.50322676,
0.50653493,
0.50651497,
0.5117272,
0.5071814,
0.5071314,
0.5038614,
0.51390606,
0.5075104,
0.5135032,
0.514871,
0.5089187,
0.49998772,
0.51347005,
0.51260304,
0.5135767,
0.50360304,
0.5130864,
0.5111301,
0.5105116,
0.51082665,
0.5108024,
0.51055527,
0.5060805,
0.50633276,
0.5060163,
0.5122469,
0.511439,
0.5154541,
0.5087506,
0.51258737,
0.509715,
0.509042,
0.5114511,
0.5109463,
0.50339776,
0.5043883,
0.49897683,
0.5055119,
0.5089904,
0.5051359,
0.5095636,
0.511491,
0.5120197,
0.5132958,
0.5139692,
0.5143067,
0.51412934,
0.51225233,
0.5115345,
0.49752656,
0.5110533,
0.5131486,
0.5105205,
0.51316255,
0.5135548,
0.5096603,
0.5125157,
0.51408094,
0.50830096,
0.5034345,
0.509491,
0.51349497,
0.5069816,
0.51142806,
0.5085024,
0.5099815,
0.50872326,
0.51586545,
0.5066637,
0.51351035,
0.51116526,
0.51096755,
0.51301914,
0.5135765,
0.4966207,
0.5116527,
0.5048139,
0.5107183,
0.5079725,
0.51441807,
0.5134621,
0.5040301,
0.5091856,
0.51248956,
0.5092424,
0.5176666,
0.508062,
0.50912106,
0.5124523,
0.51147085,
0.51228625,
0.50363743,
0.50801826,
0.50560325,
0.5038927,
0.51366144,
0.5078105,
0.5079671,
0.495771,
0.49722922,
0.5088043,
0.5105195,
0.51625746,
0.5040477,
0.5100088,
0.5109488,
0.51500726,
0.5081663,
0.50997114,
0.51087046,
0.50758433,
0.51227635,
0.5090572,
0.50580555,
0.50539976,
0.50592124,
0.5126334,
0.50833255,
0.51539725,
0.5091906,
0.5115856,
0.5046212,
0.5106714,
0.5142739,
0.5035887,
0.50879,
0.51390815,
0.5041778,
0.50923204,
0.5039777,
0.5048086,
0.5037747,
0.505538,
0.50214064,
0.5094781,
0.50767803,
0.5111003,
0.5120786,
0.4980438,
0.50458294,
0.51415586,
0.5019485,
0.5168365,
0.5153349,
0.5129319,
0.5140314,
0.5037286,
0.50685376,
0.5036209,
0.5090196,
0.50943035,
0.5017078,
0.5103941,
0.50878346,
0.51299214,
0.5078663,
0.5125947,
0.5110158,
0.5111046,
0.5095557,
0.50911736,
0.5134299,
0.5128993,
0.5078919,
0.50381154,
0.50776565,
0.51049966,
0.51099694,
0.50924855,
0.50949055,
0.51364756,
0.5100761,
0.51123273,
0.50242424,
0.51218194,
0.5058925,
0.5104939,
0.5137183,
0.51250815,
0.50777245,
0.4980805,
0.51020885,
0.5082262,
0.51515037,
0.5093677,
0.5040771,
0.5115242,
0.5117356,
0.5146966,
0.5103907,
0.5066004,
0.5085907,
0.51117796,
0.513312,
0.504041,
0.512323,
0.5115233,
0.50596935,
0.512916,
0.5101154,
0.5107268,
0.5150196,
0.5048303,
0.5110619,
0.5072507,
0.5127361,
0.513899,
0.51149106,
0.5141315,
0.5140698,
0.512884,
0.5143979,
0.5036793,
0.5115336,
0.51488864,
0.51182127,
0.5138102,
0.51675093,
0.50349766,
0.5087553,
0.51063997,
0.51399446,
0.51007736,
0.5060773,
0.5125485,
0.5132826,
0.5144868,
0.51351726,
0.5064064,
0.5044214,
0.5078422,
0.5096323,
0.5035394,
0.50125486,
0.51556253,
0.51532227,
0.5104755,
0.5087051,
0.5077046,
0.51080346,
0.5009065,
0.5088515,
0.50769734,
0.5059858,
0.5074886,
0.49994266,
0.5087569,
0.51020813,
0.50786996,
0.49419668,
0.50404996,
0.5104801,
0.5101549,
0.5112797,
0.5131243,
0.5143511,
0.51228064,
0.5145921,
0.51035714,
0.5066336,
0.513559,
0.5113851,
0.5108132,
0.50643224,
0.5120798,
0.50856596,
0.5093743,
0.5028525,
0.5104212,
0.5070242,
0.5069629,
0.50588334,
0.5122708,
0.5159862,
0.5114099,
0.5002927,
0.51526666,
0.5105576,
0.5150525,
0.5100921,
0.5080651,
0.5140278,
0.5125439,
0.5100812,
0.5051359,
0.50362265,
0.5139869,
0.5126541,
0.504522,
0.5117867,
0.5116691,
0.50032043,
0.5130728,
0.50989956,
0.51043415,
0.51196307,
0.50886285,
0.5106808,
0.50820565,
0.50831383,
0.5106349,
0.5058949,
0.49856597,
0.5085233,
0.5133531,
0.513984,
0.5106531,
0.515749,
0.51146525,
0.5038537,
0.5080433,
0.5089574,
0.50894535,
0.5103425,
0.5084927,
0.4994194,
0.5155077,
0.5102776,
0.5071109,
0.5144358,
0.51298165,
0.5008443,
0.50751704,
0.5059822,
0.51017636,
0.51061875,
0.508212,
0.51173997,
0.51238126,
0.51161027,
0.51253897,
0.5074576,
0.50836796,
0.50776744,
0.5139516,
0.5063829,
0.50880593,
0.5076145,
0.50713897,
0.5104528,
0.51145625,
0.5108186,
0.5111091,
0.5107502,
0.51122195,
0.5138121,
0.5112821,
0.51160175,
0.51506364,
0.50418186,
0.51158005,
0.5122097,
0.5133823,
0.50800705,
0.51142865,
0.5118653,
0.5083643,
0.5147889,
0.51534045,
0.50156826,
0.5125485,
0.5028803,
0.5109177,
0.5051982,
0.51148796,
0.5064979,
0.512766,
0.5122949,
0.51378787,
0.5145207,
0.50097865,
0.5101964,
0.5005998,
0.5098042,
0.5126714,
0.50489175,
0.51079804,
0.50996417,
0.5079672,
0.51273,
0.5089478,
0.49612308,
0.50832814,
0.51143146,
0.50925475,
0.5116677,
0.5124315,
0.5096749,
0.5106093,
0.5087203,
0.5110515,
0.51601213,
0.5054159,
0.5125696,
0.5154716,
0.5086754,
0.5119911,
0.5124807,
0.5120326,
0.50836176,
0.5132568,
0.5131673,
0.5007069,
0.5052769,
0.5151246,
0.5019069,
0.5057142,
0.50985587,
0.51222867,
0.51467884,
0.5047475,
0.50366324,
0.51343185,
0.51261026,
0.51517195,
0.5151267,
0.50907737,
0.51016796,
0.5098148,
0.5031831,
0.50345314,
0.5099976,
0.51203406,
0.51637465,
0.5117794,
0.51330316,
0.5068936,
0.5055321,
0.5115437,
0.5180979,
0.51108253,
0.50339407,
0.5130721,
0.5120467,
0.5092049,
0.5094827,
0.5134724,
0.5116031,
0.5149614,
0.5037338,
0.51339674,
0.51260066,
0.5054457,
0.51275074,
0.51716334,
0.5150582,
0.5106988,
0.5029679,
0.516057,
0.5113179,
0.5138412,
0.51158386,
0.5134974,
0.5138385,
0.51231265,
0.5070874,
0.5124686,
0.51115334,
0.5031847,
0.5089186,
0.5041943,
0.5143915,
0.5115397,
0.50424325,
0.50486934,
0.4916179,
0.50345266,
0.5085068,
0.51453674,
0.49659875,
0.51466703,
0.50646496,
0.5139366,
0.5074076,
0.51473045,
0.5093439,
0.5088275,
0.5091223,
0.5120833,
0.5120672,
0.5038084,
0.51067936,
0.51040727,
0.5107696,
0.51373523,
0.5096501,
0.5069595,
0.5111717,
0.51198125,
0.5138249,
0.5099894,
0.506108,
0.5097062,
0.50891644,
0.50890625,
0.50935835,
0.508153,
0.5105081,
0.51208574,
0.5067855,
0.5069622,
0.51657766,
0.51450264,
0.5138507,
0.50597644,
0.5137748,
0.51215166,
0.50318944,
0.50556827,
0.5086901,
0.5117864,
0.5147955,
0.5109645,
0.5144177,
0.5118334,
0.5110854,
0.5094494,
0.51032466,
0.50633824,
0.5048107,
0.5126141,
0.5098532,
0.5055934,
0.51394606,
0.5109655,
0.51072246,
0.5103359,
0.5130456,
0.5089217,
0.5105883,
0.5135887,
0.5141861,
0.5108962,
0.51046544,
0.5094105,
0.5065749,
0.50893587,
0.5138701,
0.5043889,
0.5143593,
0.51562804,
0.5007618,
0.5017261,
0.5131441,
0.5086645,
0.5124313,
0.51380736,
0.50877774,
0.51140124,
0.50793266,
0.5120648,
0.51145893,
0.5112482,
0.5081933,
0.5135033,
0.50988096,
0.5113797,
0.50489813,
0.514202,
0.5076202,
0.51276624]
print(len(test_D016403))
def findTopNindex(arr,N):
    """Return the indices of the N largest values of *arr*, largest first.

    If N exceeds len(arr), all indices are returned in descending-value
    order. Ties keep the reversed stable-argsort order.
    """
    ascending = np.argsort(arr)        # indices sorted by increasing value
    descending = ascending[::-1]       # flip to decreasing value
    return descending[:N]

# test = np.array([19.0, 5.0, 9.0, 1.0])
print(findTopNindex(test_D016403,20))  # indices of the 20 highest scores


#
# # s = tf.Session()
# # u =[ [0, 3.069711,  3.167817, 1.09],[1.0, 9.0,4.0,16.0] ]
# # u1 = tf.nn.top_k(u,2)
# # u2 = s.run(u1)
#
#
# # def topk(inputs, k):
# #     import heapq
# #     if inputs == None or len(inputs) < k or len(inputs) <= 0 or k <= 0:
# #         return []
# #     output = []
# #     for number in inputs:
# #         if len(output) < k:
# #             output.append(number)
# #         else:
# #             output = heapq.nlargest(k, output)
# #             if number >= output[0]:
# #                 continue
# #             else:
# #                 output[0] = number
# #     return output[::-1]
# #
# # inputs = [4,5,1,6,2,7,3,8]
# # print(topk(inputs, 3))
#
#
# # #assume that an array has 0, 3.069711,  3.167817.
# # array1 = [[0, 1,  2],[3,4,5]]
# # array2 = [[6, 1,  3],[8,6,6]]
# # a = tf.concat([array1, array2], 0)
# # aa = tf.concat([[array1], [array2]], 0)
# # aa = tf.slice(a,[0,0],[2,-1])
# # aa = tf.slice(a,[2,0],[2,-1])
# # aeqb = tf.equal(a,b)
# # aeqb_int = tf.to_int32(aeqb)
# # aeqb = tf.add(a,tf.constant(1,shape=a.shape))
# # sess = tf.Session()
# # a0 = tf.constant([0, 0, 0, 0, 0, 0])
# # a = tf.zeros_like(a0)
# # a=tf.Variable([0, 0, 0, 0, 0, 0])
# # b = tf.constant([1,2,4])
# # c=tf.constant([1,1,1])
# #
# # a = tf.Variable([1, 2, 3, 4, 5, 6, 7, 8])
# # ref = tf.zeros_like(a)
# # ref = tf.Variable(ref)
# # indices = tf.constant([[4], [3], [1], [7]])
# # updates = tf.constant([9, 10, 11, 12])
# # update = tf.scatter_nd_update(ref, indices, updates)
#
#
#
# # A = [0,3,4,5,6]
# # B = [7,3,4,1,1]
# # bb=[3,4]
# # out = tf.nn.in_top_k(A,bb,2)
# # with tf.Session() as sess:
# #     tf.global_variables_initializer().run()
# #     bbb= sess.run(out)
#
# # c = tf.equal(A, B)
# #
# # B = tf.cast(B, tf.int32)
# # index66 = tf.where(tf.equal(B,1))
# # index =tf.squeeze(index66)
# # index = tf.to_int32(index)
# #
# # location_idx = tf.nn.top_k(A,2)[1]
# #
# # location_idx_sorted = tf.nn.top_k(location_idx,2)[0]
# # index_sorted = tf.nn.top_k(index,2)[0]
# #
# # ttt=tf.equal(location_idx,index)
# # isequal = tf.reduce_all(ttt)
# # tt = tf.to_int32(isequal)
#
#
# # a=[0.6, 0.1, 0.2 ,0.05, 0.5, 0.1, 0.3]
# # b=[1,     0,   1,    1,   0,   0,    1]
# # b = tf.cast(b, tf.int32)
# #
# # index11 = tf.where(tf.equal(b,1))
# # index = tf.squeeze(index11)
# # zz = tf.reshape(index,[-1])
# # sum = tf.reduce_sum(b)
# # sum = tf.cast(sum,tf.int32)
# # location_idx = tf.nn.top_k(a,sum)[1]    #,False
# # location_idx = tf.cast(location_idx,tf.int64)
# #
# # location_idx_sorted = tf.nn.top_k(location_idx,sum)[0]
# # index_sorted = tf.nn.top_k(index,sum)[0]
# #
# # test = tf.nn.top_k(index, sum)
# # isequal = tf.reduce_all(tf.equal(location_idx_sorted,index_sorted))
# # z = tf.to_int32(isequal)
# #
# # with tf.Session() as sess:
# #     tf.global_variables_initializer().run()
# #     d12 = sess.run(zz)
# #     d=sess.run(index)
# #     d0=sess.run(location_idx)
# #     d11 = sess.run(index_sorted)
# #     d22 = sess.run(location_idx_sorted)
# #
# #     d1 = sess.run(isequal)
# #     d2 = sess.run(z)
# #     d2_test = sess.run(test)
# # # aaa0= sess.run(update)
# #
# #
# # print("aa")
#
#
#
#
# # mask = tf.greater(array, 0)
# # non_zero_array = tf.boolean_mask(array, mask)
# #
# # inputs=[[0.,2.,3.],[4.,0.,6.]]
# # inputs = tf.reshape(inputs,(1,-1))
# # mask = tf.greater(inputs,0.0)
# # non_zero_array = tf.boolean_mask(inputs, mask)
# # sha = non_zero_array.get_shape().as_list()
# # ssha = tf.shape(non_zero_array)
# # tt = sess.run(inputs)
# # tt = sess.run(mask)
# # tt0= sess.run(non_zero_array)
# # tt0= sess.run(ssha)
# #
# # # normA = tf.linalg.norm(a)
# # # normB = tf.linalg.norm(b)
# # # t = tf.reduce_sum(b)
# # # c = sum / ((normA * normB))
# # # normB = tf.where( tf.is_nan(normB), c, normB )
# # # c = tf.where( tf.equal(t,0.0), 0.000001, sum/(normA * normB) )
# # # c = tf.where(tf.equal(c, c), c, tf.zeros_like(c))
# # # c = sum / ((normA * normB))
# #
# # # r_labels = tf.where( tf.is_nan(r_preds), tf.zeros_like(r_preds), tf.ones_like(r_preds) )
# # # r_preds = tf.where( tf.is_nan(r_preds), tf.zeros_like(r_preds), r_preds )
# # # r_preds = tf.clip_by_value(r_preds, 1e-10, 1.0)
# # # r_preds = tf.reshape(r_preds,[dim, 1])
# # # r_labels = tf.ones_like(r_preds)
# # # r_labels = tf.ones([dim, 1], dtype="float")
# #
