Jiahang's picture
update script
aefff9b
import json
import gradio as gr
from nn_meter import load_latency_predictor
cortexA76cpu_predictor = load_latency_predictor("cortexA76cpu_tflite21")
adreno640gpu_predictor = load_latency_predictor("adreno640gpu_tflite21")
adreno630gpu = load_latency_predictor("adreno630gpu_tflite21")
myriadvpu_predictor = load_latency_predictor("myriadvpu_openvino2019r2")
predictor_map = {
"cortexA76cpu_tflite21": cortexA76cpu_predictor,
"adreno640gpu_tflite21": adreno640gpu_predictor,
"adreno630gpu_tflite21": adreno630gpu,
"myriadvpu_openvino2019r2": myriadvpu_predictor
}
feature_for_kernel = {
# remove the last two float
"conv": ["HW", "CIN", "COUT", "KERNEL_SIZE", "STRIDES"],
"dwconv": ["HW", "CIN", "COUT", "KERNEL_SIZE", "STRIDES"],
"fc": ["CIN", "COUT"],
# support up to 4 cin, if less than 4, the latter cin will be set to 0
"concat": ["HW", "CIN1", "CIN2", "CIN3", "CIN4"],
#
"maxpool": ["HW", "CIN", "COUT", "KERNEL_SIZE", "POOL_STRIDES"],
"avgpool": ["HW", "CIN", "COUT", "KERNEL_SIZE", "POOL_STRIDES"],
"split": ["HW", "CIN"],
"channelshuffle": ["HW", "CIN"],
"se": ["HW", "CIN"],
"global-avgpool": ["HW", "CIN"],
"bnrelu": ["HW", "CIN"],
"bn": ["HW", "CIN"],
"hswish": ["HW", "CIN"],
"relu": ["HW", "CIN"],
"addrelu": ["HW", "CIN1", "CIN2"],
"add": ["HW", "CIN1", "CIN2"],
}
def get_type(str):
operate_type = str.split("-")[0]
if operate_type == 'global' or operate_type == 'gap':
operate_type = 'global-avgpool'
return operate_type
def get_configuration(operate_type, value_arr):
feature_arr = feature_for_kernel[operate_type]
if operate_type == 'concat':
configuration_arr = []
for i in range(len(feature_arr)):
if value_arr[i] != 0:
configuration_arr.append(feature_arr[i]+"="+str(value_arr[i]))
else:
break
else:
configuration_arr = [feature_arr[i]+"="+str(value_arr[i]) for i in range(min(len(feature_arr),len(value_arr)))]
return ', '.join(configuration_arr)
def data_process(data):
new_data = []
for item in data:
operate_type = get_type(item[1])
new_item = {
"order": item[0],
"type": operate_type,
"configuration": get_configuration(operate_type, item[2]),
"latency": item[3],
"name": item[4],
}
new_data.append(new_item)
return new_data
def generate_html(hardware, latency, block_detail):
data = data_process(block_detail)
doc = """<html>
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<meta name="viewport" content="width=device-width,
initial-scale=1.0, maximum-scale=1.0, user-scalable=no" />
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.2.0-beta1/dist/css/bootstrap.min.css" rel="stylesheet">
<link href="https://unpkg.com/bootstrap-table@1.20.2/dist/bootstrap-table.min.css" rel="stylesheet">
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.8.3/font/bootstrap-icons.css">
<style>
html {
font-family: sans-serif;
padding: 5px;
}
body {
padding: 10px;
font-size: 0.875rem;
}
#dataviz {
width: 100%;
height: 300px;
position: relative;
}
#toolbar {
margin-top: 10px;
margin-bottom: 15px;
display: flex;
align-items: center;
}
input[type="number"]:focus-visible {
outline: none;
}
.bootstrap-table .fixed-table-container .fixed-table-body {
height: auto;
}
</style>
</head>
<body>
<h4 style="font-size: 1.5rem">Latency Analysis <i class="bi bi-question-circle" data-bs-container="body" data-bs-toggle="popover" data-bs-placement="right" style="font-size:1.2rem;"></i></h4>
<div id="popoverInfo" style="display: none">
The latency results are empowered by Microsoft nn-Meter. For more technical details, please refer to the paper: <a href="https://dl.acm.org/doi/abs/10.1145/3529706.3529712" target="_blank">nn-METER: Towards Accurate Latency Prediction of DNN Inference on Diverse Edge Devices</a>.
</div>
<div id="toolbar">
<div style="display: flex;align-items: center;">
<span>Group By: </span>
<select class="form-select" id="inputGroupBy" style="width: fit-content;margin-left: 5px;">
<option value="type">Operator Type</option>
<option value="name">None</option>
</select>
</div>
<div style="margin-left: 45px;margin-top:6px;display: flex;align-items: center;">
<div><label><input type="radio" name="quantity" value="all" class="quantity" checked> Show all</label></div>
<div style="margin-left: 10px;">
<label><input type="radio" name="quantity" value="top" class="quantity"> Show top</label>
<input type="number" value="10" min="1" style="width: 50px; border: none;
border-bottom: 1px #aaa solid;" id="quantityNumber" disabled>
</div>
</div>
</div>
<div style="display: flex;">
<div id="dataviz"> </div>
</div>
<table id="table" data-search="true" data-search-align="left" data-pagination="true" data-page-size="30" data-page-list="[10, 20, 30, 50, 100, all]">
<thead>
<tr>
<th data-field="order" data-sortable="true">Excution Order</th>
<th data-field="type" data-sortable="true">Operator Type</th>
<th data-field="configuration">Configuration</th>
<th data-field="latency" data-sortable="true">Latency (ms)</th>
<th data-field="name" width="20%" data-sortable="true">Detail Operator</th>
</tr>
</thead>
</table>
<script src="https://cdn.jsdelivr.net/npm/echarts@5.3.3/dist/echarts.min.js" type="text/javascript"></script>
<script src="https://cdn.jsdelivr.net/npm/jquery/dist/jquery.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.2.0-beta1/dist/js/bootstrap.bundle.min.js"></script>
<script src="https://unpkg.com/bootstrap-table@1.20.2/dist/bootstrap-table.min.js"></script>
</body>
<script>
""" + f"""let rawData = {str(data).replace("'", '"')};""" + """
rawData.forEach(item => {
item.name = item.name.split(";").join("; ");
item.latency = Number(item.latency) ? Number(item.latency) : item.latency;
})
// table
let $table = $("#table");
$(function () {
$table.bootstrapTable({ data: rawData })
})
// visualization
const chartDom = document.getElementById("dataviz");
let myChart = echarts.init(chartDom);
Array.prototype.groupBy = function (key) {
return this.reduce(function (rv, x) {
(rv[x[key]] = rv[x[key]] || []).push(x);
return rv;
}, {});
};
function processData(rawData, groupBy, quantity) {
// transform data
let seriesData = Object.entries(rawData.groupBy(groupBy)).map(([name, arr]) => {
const value = arr.reduce((sum, curr) => sum + curr.latency, 0);
const type = arr[0].type;
return { name, value, type }
})
.sort((a, b) => {
return b.value - a.value
});
if (quantity) {
seriesData = seriesData.slice(0, quantity);
}
return {
seriesData,
legendData: seriesData.filter(d => Number(d.value)).map(d => d.name)
};
}
function formatNumber(num, fixed = 2){
if(Number(num.toFixed(fixed)) > 0){
return num.toFixed(fixed);
}else{
return num.toPrecision(1);
}
}
function render(data, groupBy) {
const sum = data.seriesData.reduce(function (prev, current) {
return prev + (Number(current.value) ? Number(current.value) : 0)
}, 0);
let option = {
title: {
text: """ + f"""`Total latency is {format(latency, '.4f')}(ms)`,
subtext: `on Hardware {hardware}`,"""+"""
left: "left",
textStyle:{
fontSize: 14
}
},
tooltip: {
trigger: "item",
formatter: (params) => groupBy==="name"? `<i>type:</i> ${params.data.type}<br><i>detail:</i> ${params.data.name}<br><b>${formatNumber(params.data.value)}</b><br><b>(${formatNumber(params.data.value / sum * 100)}%)</b>` : `${params.data.name}<br><b>${formatNumber(params.data.value)}</b><br><b>(${formatNumber(params.data.value / sum * 100)}%)</b>`,
extraCssText: "max-width: 400px; white-space: break-spaces;"
},
legend: {
type: "scroll",
orient: "vertical",
right: "10%",
top: "12%",
bottom: "12%",
data: data.legendData,
formatter: (name) => {
let arr = name.split(";");
return arr.length === 1 ? name : (arr[0]+"...");
},
tooltip: {
show: true,
formatter: (params) => {
let datum = data.seriesData.find(d => d.name === params.name);
return groupBy==="name"? `<i>type:</i> ${datum.type}<br><i>detail:</i> ${datum.name}<br><b>${formatNumber(datum.value)}</b><br><b>(${formatNumber(datum.value / sum * 100)}%)</b>` :`${datum.name}<br><b>${formatNumber(datum.value)}</b><br><b>(${formatNumber(datum.value / sum * 100)}%)</b>`
},
position: (point, params, dom, rect, { contentSize, viewSize }) => [viewSize[0] * 0.4 - contentSize[0] * 0.5, viewSize[1] * 0.5 - contentSize[1] * 0.5]
}
},
series: [
{
type: "pie",
radius: ["40%", "75%"],
center: ["40%", "50%"],
data: data.seriesData,
emphasis: {
itemStyle: {
shadowBlur: 10,
shadowOffsetX: 0,
shadowColor: "rgba(0, 0, 0, 0.5)"
}
}, label: {
formatter: "{d}%",
position: "inside",
color: "#fff",
},
}
],
color: ["#4e79a7", "#f28e2c", "#e15759", "#76b7b2", "#59a14f", "#edc949", "#af7aa1", "#ff9da7", "#9c755f", "#bab0ab"]
};
myChart.dispose();
myChart = echarts.init(chartDom);
myChart.setOption(option);
myChart.on("selectchanged", function(params){
const index = params.fromActionPayload.dataIndexInside;
const text = data.seriesData[index].name;
$table.bootstrapTable("resetSearch", text);
});
myChart.on("legendselectchanged", function(params) {
suppressSelection(myChart, params);
});
function suppressSelection(chart, params) {
chart.setOption({ animation: false });
// Re-select what the user unselected
chart.dispatchAction({
type: "legendSelect",
name: params.name
});
chart.setOption({ animation: true });
}
}
// config
let groupBy = "type";
let quantityNumber = 10;
let showAll = true;
render(processData(rawData, groupBy), groupBy);
function redraw() {
render(processData(rawData, groupBy, showAll ? null : quantityNumber), groupBy);
}
// change groupby
document.getElementById("inputGroupBy")
.addEventListener("change", function () {
groupBy = this.value;
redraw();
});
// change the model of show
function changeShowModel() {
if (this.value === "top") {
document.getElementById("quantityNumber").disabled = false;
showAll = false;
} else {
document.getElementById("quantityNumber").disabled = true;
showAll = true;
}
redraw();
}
let items = Object.values(document.getElementsByClassName("quantity"))
.forEach(item => item.addEventListener("change", changeShowModel));
// change the number of show
document.getElementById("quantityNumber")
.addEventListener("change", function () {
quantityNumber = this.value;
redraw();
})
// enable popover
const popoverTriggerList = document.querySelectorAll(`[data-bs-toggle="popover"]`)
const popoverList = [...popoverTriggerList].map(popoverTriggerEl => new bootstrap.Popover(popoverTriggerEl, {
html : true,
content: function() {
return $("#popoverInfo").html();
}
}));
</script>
</html>
"""
return f"""<iframe style="width: 100%; height: 480px" name="result" allow="midi; geolocation; microphone; camera; display-capture; encrypted-media;" sandbox="allow-modals allow-forms allow-scripts allow-same-origin allow-popups allow-top-navigation-by-user-activation allow-downloads" allowfullscreen="" allowpaymentrequest="" frameborder="0" srcdoc='{doc}'></iframe>"""
def generate_error_html(massage):
return f"""<div style="color:#842029;background: #f8d7da;padding: 10px;border-radius: 10px; margin-top: 15px;"><b>nn-meter meets an error in latency prediction</b>: {massage}</div>
<div style="padding: 10px;">If you have any questions about the result, you can open new issues in <a href="https://github.com/microsoft/nn-Meter" target="_blank" style="color:#2563eb">nn-meter Git repository</a>.</div>
"""
def get_latency(model, hardware_name):
if model == None:
return generate_error_html("Please upload a model file or select one example below.")
model = model.name
if hardware_name == '':
return generate_error_html("Please select a device.")
predictor = predictor_map[hardware_name]
if model.endswith("onnx"):
model_type = "onnx"
elif model.endswith("pb"):
model_type = "pb"
else:
model_type = "nnmeter-ir"
try:
model_latency, block_detail = predictor.detailed_predict(model, model_type)
return generate_html(hardware_name, model_latency, block_detail)
except Exception as e:
return generate_error_html(repr(e))
title = "Interactive demo: nn-Meter (Draft Version)"
description = "Demo for Microsoft's nn-Meter, a novel and efficient system to accurately predict the inference latency of DNN models on diverse edge devices. To use it, simply upload a model file, or use one of the example below and click ‘submit’. Results will show up in a few seconds."
article = "<p style='text-align: center'><a href='https://dl.acm.org/doi/10.1145/3458864.3467882'>nn-Meter: towards accurate latency prediction of deep-learning model inference on diverse edge devices</a> | <a href='https://github.com/microsoft/nn-Meter'>Github Repo</a></p>"
examples =[
["samples/mobilenetv3small_0.pb", "cortexA76cpu_tflite21"],
["samples/mobilenetv3small_0.onnx", "adreno640gpu_tflite21"],
["samples/mobilenetv3small_0.json", "adreno630gpu_tflite21"]
]
inputs = [
gr.inputs.File(label="Model File"),
gr.inputs.Radio(choices=["cortexA76cpu_tflite21", "adreno640gpu_tflite21", "adreno630gpu_tflite21", "myriadvpu_openvino2019r2"], label="Device"),
]
outputs = gr.outputs.HTML()
iface = gr.Interface(fn=get_latency,
inputs=inputs,
outputs=outputs,
title=title,
description=description,
article=article,
examples=examples,
allow_flagging="auto",
css="""
div[id="6"] {
flex-direction: column;
}
div[id="12"] {
margin-left: 0px !important;
margin-top: 0.75em !important;
}
div[id="12"] iframe{
height: 80vh !important;
}
""")
iface.launch()