import numpy as np

import onnx
from onnx import AttributeProto, GraphProto, TensorProto
from onnx import helper, numpy_helper, shape_inference
from onnx import optimizer  # NOTE(review): removed in onnx>=1.9 — the separate `onnxoptimizer` package is needed there
# ---- Graph inputs and parameters ------------------------------------------
# Model input: one flattened 28x28 image (batch size hard-coded to 1).
X = helper.make_tensor_value_info('X', TensorProto.FLOAT, [1, 784])

# Weight matrix, declared twice on purpose: as a graph input (value_info)
# and as a constant initializer, so runtimes treat it as a baked-in
# parameter rather than something the caller must feed.
W_info = helper.make_tensor_value_info('W', TensorProto.FLOAT, [784, 10])
W = np.random.randn(784, 10).astype(np.float32)
W = numpy_helper.from_array(W, 'W')

# Bias row vector, same dual declaration as W.
B_info = helper.make_tensor_value_info('B', TensorProto.FLOAT, [1, 10])
B = np.ones([1, 10]).astype(np.float32)
B = numpy_helper.from_array(B, 'B')

# Graph output: scores for the 10 classes.
Z = helper.make_tensor_value_info('Z', TensorProto.FLOAT, [1, 10])
# ---- Nodes and graph -------------------------------------------------------
# NOTE(review): the argument lists of these calls were lost in this paste.
# The bodies below are the standard single-layer MLP reconstruction implied
# by the surrounding code (X @ W + B -> Softmax -> Z); confirm the op types,
# intermediate tensor names, and graph name against the original file.
matmul = helper.make_node(
    'MatMul',
    inputs=['X', 'W'],
    outputs=['XW'],
)
bias = helper.make_node(
    'Add',
    inputs=['XW', 'B'],
    outputs=['XWB'],
)
softmax = helper.make_node(
    'Softmax',
    inputs=['XWB'],
    outputs=['Z'],
)

graph_def = helper.make_graph(
    nodes=[matmul, bias, softmax],
    name='mlp',  # NOTE(review): original graph name not visible — confirm
    inputs=[X, W_info, B_info],
    outputs=[Z],
    initializer=[W, B],  # bake the random W and all-ones B into the model
)
# ---- Build, validate, optimize, and serialize ------------------------------
model_def = helper.make_model(graph_def, producer_name='benchmarks')

# Re-validate after every transformation pass so a bad pass fails loudly
# at the step that broke the model.
onnx.checker.check_model(model_def)
model_def = shape_inference.infer_shapes(model_def)
onnx.checker.check_model(model_def)
# NOTE(review): onnx.optimizer was removed from the onnx package in 1.9;
# on modern onnx this line requires the standalone `onnxoptimizer` package
# (same `optimize()` API) or should be dropped.
model_def = optimizer.optimize(model_def)
onnx.checker.check_model(model_def)

onnx.save_model(model_def, 'e1_mlp.onnx')