# Inspect a Keras HDF5 weight checkpoint and export a layer/weight-name summary.
import h5py
import sys
sys.path.append(".")  # HACK: allow local-package imports when run from the repo root
import pandas as pd
# Pretrained Inception-V3 (TF dim ordering) checkpoint to inspect.
file_path = "model_zoo/weights/inception_v3_weights_tf_dim_ordering_tf_kernels.h5"
def load_attributes_from_hdf5_group(group, name):
  """Load a list of string attributes `name` from an HDF5 group.

  HDF5 cannot store attribute data larger than HDF5_OBJECT_HEADER_LIMIT
  bytes, so Keras-style writers split oversized attribute lists into
  numbered chunks ("name0", "name1", ...). This reads either form.

  Arguments:
      group: A pointer to a HDF5 group.
      name: A name of the attributes to load.

  Returns:
      data: List of decoded attribute strings (empty if absent).
  """
  attrs = group.attrs
  # Fast path: the attribute fits in a single entry.
  if name in attrs:
    return [raw.decode('utf8') for raw in attrs[name]]
  # Chunked form: gather "name0", "name1", ... until a chunk is missing.
  collected = []
  chunk_id = 0
  while '%s%d' % (name, chunk_id) in attrs:
    collected.extend(raw.decode('utf8')
                     for raw in attrs['%s%d' % (name, chunk_id)])
    chunk_id += 1
  return collected
with h5py.File(file_path, "r") as f:
    # 1. Read the top-level list of layer names stored in the checkpoint.
    layer_names = load_attributes_from_hdf5_group(f, 'layer_names')
    # 2. For each layer, record the names of its weight tensors.
    result = {"layer_name": [], "weight_name": []}
    for name in layer_names:
        g = f[name]
        weight_names = load_attributes_from_hdf5_group(g, 'weight_names')
        result["layer_name"].append(name)
        # Note: each cell holds the full list of weight names for that layer.
        result["weight_name"].append(weight_names)
    # 3. Export the summary table (requires an Excel writer backend, e.g. openpyxl).
    result_pd = pd.DataFrame(result)
    result_pd.to_excel("inception_h5_weight_info.xlsx")
