Some models have operators after the final convolution layers that are not a good fit for the inference engine. Cutting off the operators after the conv layers and implementing them on the CPU gives better performance.
This post includes:
1. An example of getting the shape of intermediate ONNX nodes
2. An example of adding outputs to an ONNX model and setting their name, type, and shape
3. An example of editing an ONNX model by cutting off the part after the conv layers (the green part of the graph in the figure)

A minimal standalone sketch of the first two items follows; after that comes the full script.
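The sketch below shows only the two building blocks in isolation: running shape inference to read intermediate tensor shapes, and promoting an intermediate tensor to a graph output. The file name, tensor name, and shape are placeholders (not taken from the model above), and onnx.helper.make_tensor_value_info is used as a shortcut for the manual ValueInfoProto construction in the full script further down.

import onnx
from onnx import helper, shape_inference

# Placeholder model path; replace with your own file
model = onnx.load("model.onnx")

# Shape inference annotates intermediate tensors with type/shape information
inferred = shape_inference.infer_shapes(model)
for vi in inferred.graph.value_info:
    dims = [d.dim_value for d in vi.type.tensor_type.shape.dim]
    print(vi.name, dims)

# Promote an intermediate tensor to a graph output (placeholder name/shape)
new_out = helper.make_tensor_value_info(
    "some_intermediate_tensor", onnx.TensorProto.FLOAT, [1, 256, 80, 80])
model.graph.output.append(new_out)
onnx.save(model, "model_with_extra_output.onnx")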
import onnx
import sys
import json
from onnx import shape_inference, TensorProto

if len(sys.argv) < 2:
    print("Usage: " + sys.argv[0] + " onnx_filename")
    exit(-1)

onnx_file = sys.argv[1]

# Load the ONNX model
model = onnx.load(onnx_file)
graph = model.graph
outputs = model.graph.output

# The script expects a model with exactly three outputs (score / box / kpt heads)
if len(outputs) != 3:
    print("This isn't ScoreBoxKpt model!")
    quit()

output_list = ["output0", "output1", "output2"]
for output in outputs:
    if output.name in "score_box_kpt":   # loose check against the expected head names
        print(f"output name: {output.name}")
    else:
        print("This isn't a fit model!")
        quit()

def getConvList(endName):
    # Walk the graph backwards from endName and collect the first Conv node
    # reached on each path; stop descending once a Conv is found.
    stack = []
    stack.append(endName)
    convList = []
    while len(stack):
        name = stack.pop()
        for node in graph.node:
            if name in node.output:
                if node.op_type == "Conv":
                    if node.name not in convList:
                        convList.append(node.name)
                else:
                    for input in node.input:
                        if input not in stack:
                            stack.insert(0, input)
    return convList

Conv0 = getConvList(output_list[0])
Conv1 = getConvList(output_list[1])
Conv2 = getConvList(output_list[2])

def save2json(save_dict, name):
    # Dump a dict to a JSON file, e.g. {"a": "Runoob", "b": 7}
    if len(save_dict) == 0:
        print("there is nothing to save to json")
        return None
    with open(name, "w") as fp:
        json.dump(save_dict, fp, sort_keys=False, indent=4, separators=(",", ": "))  # default=str

# Label for each original output's conv group; written to conv_param.json at the end
save_dict = {output_list[0]: "scoreConv", output_list[1]: "boxConv", output_list[2]: "kptConv"}
conv_list = Conv0 + Conv1 + Conv2

Get the shapes of the intermediate ONNX nodes:
output_dim_dic = {}
inferred_onnx_model = shape_inference.infer_shapes(model)
inferred_graph = inferred_onnx_model.graph
inferred_value_info = inferred_graph.value_info
# Record the output tensor type/shape of every kept conv node
for node in graph.node:
    if node.name in conv_list:
        for value_info in inferred_value_info:
            if value_info.name == node.output[0]:
                output_dim_dic[node.name] = value_info.type.tensor_type

# Delete the ONNX nodes that come after the conv layers
# Find each target conv node, then delete every node downstream of it
for name in conv_list:
    target_node = None
    for node in graph.node:
        if node.name == name:
            target_node = node
            break
    output_names = []
    for output in target_node.output:
        output_names.append(output)
    set1 = set(output_names)
    del_node = []
    have_new_del_node = False
    # Repeatedly sweep the graph, marking any node that consumes an output
    # of the current frontier, until no new downstream node is found
    while True:
        have_new_del_node = False
        for node in graph.node:
            if node.name in del_node:
                continue
            set2 = set(node.input)
            if set1.intersection(set2):
                output_names = node.output
                set1 = set(output_names)
                del_node.append(node.name)
                have_new_del_node = True
        if have_new_del_node == False:
            break
    for node in graph.node:
        if node.name in del_node:
            print(f"1 remove node {node.name}")
            model.graph.node.remove(node)

have_new_del_node = False
# Second pass: remove dangling nodes whose outputs feed nothing and
# that are not one of the conv layers we want to keep
while True:
    have_new_del_node = False
    for node1 in graph.node:
        if node1.name in conv_list:
            continue
        set1 = set(node1.output)
        to_delete = True
        for node2 in graph.node:
            set2 = set(node2.input)
            if set1.intersection(set2):
                to_delete = False
                break
        if to_delete == True:
            print(f"2 remove node {node1.name}")
            model.graph.node.remove(node1)
            have_new_del_node = True
    if have_new_del_node == False:
        break

save_output_name = []
# Add the kept conv outputs as new graph outputs (set name, type and shape)
for node in graph.node:
    if node.name in conv_list:
        output_info = onnx.helper.ValueInfoProto()
        # Rename the conv's output tensor to the node name so the new
        # graph output carries a readable name
        node.output[0] = node.name
        output_info.name = node.output[0]
        for dim_value in output_dim_dic[node.name].shape.dim:
            output_info.type.tensor_type.shape.dim.extend([dim_value])
        output_info.type.tensor_type.elem_type = TensorProto.FLOAT
        print(output_info)
        graph.output.extend([output_info])
        save_output_name.append(node.output[0])

outputs = model.graph.output
# Drop the original graph outputs; only the new conv outputs are kept.
# Removing items while iterating the repeated field skips elements, so the
# same sweep is run a second time to catch outputs skipped on the first pass.
for output in outputs:
    if output.name in save_output_name:
        continue
    model.graph.output.remove(output)

outputs = model.graph.output
for output in outputs:
    if output.name in save_output_name:
        continue
    model.graph.output.remove(output)
# Save modified ONNX model
onnx.checker.check_model(model)
onnx.save(model, "backbone.onnx")
save2json(save_dict, "conv_param.json")
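To sanity-check the trimmed backbone.onnx, one option is to load it with onnxruntime and confirm that the new outputs are the kept conv layers. This is a rough sketch under assumptions not stated above: onnxruntime is installed, the model takes a single NCHW image input, and the 1x3x640x640 shape is a placeholder to adjust to your model.

import json
import numpy as np
import onnxruntime as ort

sess = ort.InferenceSession("backbone.onnx")
print("inputs :", [(i.name, i.shape) for i in sess.get_inputs()])
print("outputs:", [(o.name, o.shape) for o in sess.get_outputs()])

# conv_param.json was written by save2json above; print it for reference
with open("conv_param.json") as fp:
    print(json.load(fp))

# Placeholder input shape; adjust to the model's real input
dummy = np.random.rand(1, 3, 640, 640).astype(np.float32)
results = sess.run(None, {sess.get_inputs()[0].name: dummy})
for out, value in zip(sess.get_outputs(), results):
    print(out.name, value.shape)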