import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from onnx_tf.backend import prepare
# import tensorflow as tf
# import onnx
# class PointNetwork(nn.Module):
#     def __init__(self):
#         super(PointNetwork, self).__init__()
#         self.fc1 = nn.Linear(210, 64)  # input layer: takes the flattened 21x3 data
#         self.fc2 = nn.Linear(64, 32)  # hidden layer
#         self.fc3 = nn.Linear(32, 10)  # output layer, assuming 10 classes

#     def forward(self, x):
#         x = x.view(-1, 210)  # flatten the input data
#         x = F.relu(self.fc1(x))  # apply ReLU activation
#         x = F.relu(self.fc2(x))
#         x = self.fc3(x)  # output layer, no activation needed
#         return x

# # Define the device (GPU or CPU)
# device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# # Create a network instance
# model = PointNetwork().to(device)
# # Print the network structure
# # NOTE(review): the line below actually loads a saved model, replacing the instance created above
# model = torch.load("points-v5.pth", map_location=torch.device('cpu'))
# dummy_input = torch.randn(1,210)
# torch.onnx.export(model, dummy_input, "converted.onnx", verbose=True, input_names=['input'], output_names=['output'])
# TF_PATH = "converted.pb" # where the representation of tensorflow model will be stored
# ONNX_PATH = "converted.onnx" # path to my existing ONNX model
# onnx_model = onnx.load(ONNX_PATH)  # load onnx model
# model_name = "model"
# # prepare function converts an ONNX model to an internal representation
# # of the computational graph called TensorflowRep and returns
# # the converted representation.
# tf_rep = prepare(onnx_model)  # creating TensorflowRep object

# # export_graph function obtains the graph proto corresponding to the ONNX
# # model associated with the backend representation and serializes
# # to a protobuf file.
# tf_rep.export_graph(TF_PATH)
# # Convert the model
# converter = tf.lite.TFLiteConverter.from_saved_model(TF_PATH) # path to the SavedModel directory
# converter.target_spec.supported_ops = [
#   tf.lite.OpsSet.TFLITE_BUILTINS, # enable TensorFlow Lite ops.
#   tf.lite.OpsSet.SELECT_TF_OPS # enable TensorFlow ops.
# ]
# tflite_model = converter.convert()

# # Save the model.
# with open(model_name +".tflite", "wb") as f:
#   f.write(tflite_model)