# coding=utf-8
# https://www.mindspore.cn/tutorials/zh-CN/master/advanced/static_graph_expert_programming.html

# Using HyperMap
import time
from mindspore.ops import MultitypeFuncGraph, HyperMap
from mindspore import ops, Tensor
import mindspore as ms

add = MultitypeFuncGraph('add')


@add.register("Tensor", "Tensor")
def add_Tensor(x, y):
    """Elementwise Tensor+Tensor addition registered for the `add` MultitypeFuncGraph."""
    result = ops.add(x, y)
    return result


# HyperMap applies the registered multitype `add` graph elementwise across
# sequence inputs (here: two lists of Tensors, added pairwise).
add_map = HyperMap(add)
# Two parallel lists of 200 scalar Tensors holding 0..199.
list1 = [Tensor(i) for i in range(200)]
list2 = [Tensor(i) for i in range(200)]


@ms.jit
def hyper_map_net():
    """Add `list1` and `list2` elementwise with HyperMap inside a static graph."""
    return add_map(list1, list2)


# Wall-clock timing of the HyperMap version (includes graph compilation).
t0 = time.time()
output = hyper_map_net()
print("hyper map cost time:", time.time() - t0)


@ms.jit
def for_loop_net():
    """Build the list [0, 2, 4, ..., 398] with a plain Python for loop.

    Serves as the baseline to compare against the HyperMap version above.
    """
    results = []
    for idx in range(200):
        results.append(idx + idx)
    return results


# Wall-clock timing of the plain-loop version for comparison.
t0 = time.time()
for_loop_net()
print("for loop cost time:", time.time() - t0)

# Using the compilation cache
import time
from mindspore import set_context
from mindspore import dtype
import mindspore as ms


@ms.jit
def func(input_x, input_y):
    """Accumulate `input_x + input_x * input_y` into a running total 200 times."""
    result = input_x
    for _ in range(200):
        result = input_x + input_x * input_y + result
    return result


# Baseline: run once with the compile cache disabled; the reported time
# includes full frontend graph compilation.
set_context(enable_compile_cache=False)
x = ms.Tensor([1], dtype.float32)
y = ms.Tensor([2], dtype.float32)
start_time = time.time()
out = func(x, y)
end_time = time.time()
# Fix: corrected typo "comile_cache" -> "compile_cache" in the message.
print("Disable compile_cache cost time:", end_time - start_time)

import time
from mindspore import set_context
from mindspore import dtype
import mindspore as ms


@ms.jit
def func(input_x, input_y):
    """Repeat `acc = input_x + input_x * input_y + acc` 200 times and return it."""
    acc = input_x
    for _ in range(200):
        acc = input_x + input_x * input_y + acc
    return acc


# Run again with the compile cache enabled; once "my_compile_cache" is warm,
# subsequent executions of this script skip frontend compilation.
set_context(enable_compile_cache=True, compile_cache_path="my_compile_cache")
x = ms.Tensor([1], dtype.float32)
y = ms.Tensor([2], dtype.float32)
start_time = time.time()
out = func(x, y)
end_time = time.time()
# Fix: corrected typo "comile_cache" -> "compile_cache" in the message.
print("Enable compile_cache cost time:", end_time - start_time)

# Besides the decorator form, jit can also be applied as a function transform, as shown below:
import numpy as np
import mindspore as ms
from mindspore import nn, Tensor


class Network(nn.Cell):
    """A small MLP: flatten 28x28 inputs, then Dense(784->512)-ReLU-Dense(512->512)-ReLU-Dense(512->10)."""

    def __init__(self):
        super().__init__()
        self.flatten = nn.Flatten()
        self.dense_relu_sequential = nn.SequentialCell(
            nn.Dense(28 * 28, 512),
            nn.ReLU(),
            nn.Dense(512, 512),
            nn.ReLU(),
            nn.Dense(512, 10),
        )

    def construct(self, x):
        """Return the 10-way logits for a batch of flattened images."""
        return self.dense_relu_sequential(self.flatten(x))


# Dummy batch: 64 single-channel 28x28 images filled with ones, float32.
# NOTE(review): `input` shadows the builtin of the same name; acceptable in a
# demo script but worth renaming in production code.
input = Tensor(np.ones([64, 1, 28, 28]).astype(np.float32))

from mindspore import JitConfig
# jit_level="O1" selects a higher graph-compilation optimization level.
jitconfig = JitConfig(jit_level="O1")

def run(x):
    """Build a fresh Network, attach the O1 jit config, and do one forward pass."""
    net = Network()
    net.set_jit_config(jitconfig)
    return net(x)



# Convert `run` to static-graph execution via the functional jit transform.
run_with_jit = ms.jit(run)
# Bug fix: the original called plain `run(input)`, so the jit-transformed
# function above was never exercised; call `run_with_jit` instead.
output = run_with_jit(input)
print(output)
