"""Split an ONNX model right after its last Conv layers.

Some operators that follow the final convolution layers run poorly inside
inference engines; this script cuts away everything after those convs so it
can be implemented on the CPU instead (better performance). It demonstrates:
  1. How to get the shape of intermediate ONNX nodes.
  2. How to add ONNX model outputs (name, type, shape).
  3. How to edit an ONNX graph and remove the trailing ("green") subgraph.

Outputs: "backbone.onnx" (the truncated model) and "conv_param.json"
(mapping original output name -> list of feeding Conv node names).
"""
import json
import sys

import onnx
from onnx import TensorProto, shape_inference

if len(sys.argv) < 2:
    # BUG FIX: added the missing space before <onnx_filename>.
    print('Usage: ' + sys.argv[0] + ' <onnx_filename>')
    sys.exit(-1)

onnx_file = sys.argv[1]

# Load the ONNX model.
model = onnx.load(onnx_file)
graph = model.graph

# Sanity-check: the expected model has exactly three outputs.
outputs = model.graph.output
if len(outputs) != 3:
    print("This isn't ScoreBoxKpt model!")
    sys.exit()

output_list = ["output0", "output1", "output2"]
for output in outputs:
    # BUG FIX: original tested membership in undefined name `score_box_kpt`
    # (NameError); the intended collection is `output_list`.
    if output.name in output_list:
        print(f"output name: {output.name}")
    else:
        print("This isn't a fit model!")
        sys.exit()


def getConvList(endName):
    """Return the names of the first Conv nodes reached by walking the
    graph backwards (producer-wise) from tensor/output name `endName`.

    Uses a breadth-first worklist of tensor names; stops descending a path
    as soon as a Conv node is found on it.
    """
    stack = [endName]
    convList = []
    while stack:
        name = stack.pop()
        for node in graph.node:
            if name in node.output:
                if node.op_type == "Conv":
                    if node.name not in convList:
                        convList.append(node.name)
                else:
                    # Not a Conv: keep walking through this node's inputs.
                    # (renamed loop var: `input` shadowed the builtin)
                    for tensor_name in node.input:
                        if tensor_name not in stack:
                            stack.insert(0, tensor_name)
    return convList


Conv0 = getConvList(output_list[0])
Conv1 = getConvList(output_list[1])
Conv2 = getConvList(output_list[2])


def save2json(save_dict, name):
    """Dump `save_dict` to JSON file `name`; do nothing for an empty dict."""
    if not save_dict:
        print("this is nothing to save json")
        return None
    with open(name, 'w') as fp:
        json.dump(save_dict, fp, sort_keys=False, indent=4,
                  separators=(',', ': '))


# BUG FIX: original used undefined names scoreConv/boxConv/kptConv; the
# lists actually computed above are Conv0/Conv1/Conv2.
save_dict = {
    output_list[0]: Conv0,
    output_list[1]: Conv1,
    output_list[2]: Conv2,
}
conv_list = Conv0 + Conv1 + Conv2

# 1. Get the shapes of intermediate nodes via shape inference.
output_dim_dic = {}
inferred_onnx_model = shape_inference.infer_shapes(model)
inferred_value_info = inferred_onnx_model.graph.value_info
for node in graph.node:
    if node.name in conv_list:
        for value_info in inferred_value_info:
            if value_info.name == node.output[0]:
                output_dim_dic[node.name] = value_info.type.tensor_type

# 2. Delete every node downstream of the target Conv nodes.
for name in conv_list:
    target_node = None
    for node in graph.node:
        if node.name == name:
            target_node = node
            break
    # Transitive closure of tensors produced downstream of this conv.
    output_names = list(target_node.output)
    reached = set(output_names)
    del_node = []
    while True:
        have_new_del_node = False
        for node in graph.node:
            if node.name in del_node:
                continue
            if reached.intersection(node.input):
                output_names += node.output
                reached = set(output_names)
                del_node.append(node.name)
                have_new_del_node = True
        if not have_new_del_node:
            break
    # BUG FIX: iterate over a snapshot — removing from graph.node while
    # iterating the live repeated field skips elements.
    for node in list(graph.node):
        if node.name in del_node:
            print(f"1remove node {node.name}")
            model.graph.node.remove(node)

# Remove now-dangling nodes (whose outputs feed nobody) until a fixpoint.
while True:
    have_new_del_node = False
    # BUG FIX: snapshot here too, for the same skip-on-remove reason.
    for node1 in list(graph.node):
        if node1.name in conv_list:
            continue
        produced = set(node1.output)
        to_delete = True
        for node2 in graph.node:
            if produced.intersection(node2.input):
                to_delete = False
                break
        if to_delete:
            print(f"2remove node {node1.name}")
            model.graph.node.remove(node1)
            have_new_del_node = True
    if not have_new_del_node:
        break

# 3. Register each kept Conv's output as a new graph output
#    (name, FLOAT elem_type, and the inferred shape).
save_output_name = []
for node in graph.node:
    if node.name in conv_list:
        output_info = onnx.helper.ValueInfoProto()
        # Rename the conv's output tensor to the node name for stable outputs.
        node.output[0] = node.name
        output_info.name = node.output[0]
        for dim_value in output_dim_dic[node.name].shape.dim:
            output_info.type.tensor_type.shape.dim.extend([dim_value])
        output_info.type.tensor_type.elem_type = TensorProto.FLOAT
        print(output_info)
        graph.output.extend([output_info])
        save_output_name.append(node.output[0])

# Drop the original model outputs, keeping only the new conv outputs.
# BUG FIX: original removed from the live list while iterating it (skips
# elements) and duplicated the whole loop to compensate; one pass over a
# snapshot is correct.
for output in list(model.graph.output):
    if output.name not in save_output_name:
        model.graph.output.remove(output)

# Save the modified ONNX model and the conv-name mapping.
onnx.checker.check_model(model)
onnx.save(model, "backbone.onnx")
# BUG FIX: original was missing the closing parenthesis (SyntaxError).
save2json(save_dict, 'conv_param.json')