sigmoid_test.go
package onnx_test

import (
	"math"

	"github.com/gogo/protobuf/proto"
	"github.com/owulveryck/onnx-go/internal/onnx/ir"
)
var (
	x0  = "x0"
	w0  = "w0"
	x1  = "x1"
	w1  = "w1"
	w2  = "w2"
	y   = "y"
	exp = "Exp"
	mul = "MatMul"
	add = "Add"

	minusOne = "-1"
	one      = "1"
	pow      = "Pow"

	float = ir.TensorProto_DataType(ir.TensorProto_FLOAT)

	// sigmoidNeuron computes
	//
	//                  1
	// y = --------------------------
	//          -(w0.x0 + w1.x1 + w2)
	//     1 + e
	//
	// see http://cs231n.github.io/optimization-2/ for a representation,
	// and sigmoidNeuronRef below for a plain-Go cross-check.
	sigmoidNeuron = &ir.GraphProto{
		Node: []*ir.NodeProto{
			{
				Input:  []string{x0, w0},
				Output: []string{"x0w0"},
				OpType: mul,
			},
			{
				Input:  []string{x1, w1},
				Output: []string{"x1w1"},
				OpType: mul,
			},
			{
				Input:  []string{"x0w0", "x1w1"},
				Output: []string{"x0w0+x1w1"},
				OpType: add,
			},
			{
				Input:  []string{"x0w0+x1w1", w2},
				Output: []string{"x0w0+x1w1+w2"},
				OpType: add,
			},
			{
				Input:  []string{"x0w0+x1w1+w2", minusOne},
				Output: []string{"-(x0w0+x1w1+w2)"},
				OpType: mul,
			},
			{
				Input:  []string{"-(x0w0+x1w1+w2)"},
				Output: []string{"exp(-(x0w0+x1w1+w2))"},
				OpType: exp,
			},
			{
				Input:  []string{one, "exp(-(x0w0+x1w1+w2))"},
				Output: []string{"1+exp(-(x0w0+x1w1+w2))"},
				OpType: add,
			},
			{
				// Pow(denominator, -1) inverts the denominator, yielding the sigmoid.
				Input:  []string{"1+exp(-(x0w0+x1w1+w2))", minusOne},
				Output: []string{y},
				OpType: pow,
			},
		},
		Initializer: []*ir.TensorProto{},
		Input: []*ir.ValueInfoProto{
			newValueProtoScalar(minusOne),
			newValueProtoScalar(one),
			newValueProtoScalar(x0),
			newValueProtoScalar(w0),
			newValueProtoScalar(x1),
			newValueProtoScalar(w1),
			newValueProtoScalar(w2),
		},
		Output: []*ir.ValueInfoProto{
			newValueProtoScalar("x0w0"),
			newValueProtoScalar("x1w1"),
			newValueProtoScalar("x0w0+x1w1"),
			newValueProtoScalar("x0w0+x1w1+w2"),
			newValueProtoScalar("-(x0w0+x1w1+w2)"),
			newValueProtoScalar("exp(-(x0w0+x1w1+w2))"),
			newValueProtoScalar("1+exp(-(x0w0+x1w1+w2))"),
			newValueProtoScalar(y),
		},
	}
	sigmoidNeuronONNX []byte
)
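
// sigmoidNeuronRef is a plain-Go reference implementation of the graph
// above, added purely as an illustrative cross-check (it is not part of
// the original test fixtures; the math import above supports it). It
// mirrors the node chain: multiply, add, negate, Exp, Add, Pow(-1).
// With the sample values from the cs231n notes (w0=2, x0=-1, w1=-3,
// x1=-2, w2=-3) it returns sigmoid(1) ≈ 0.731.
func sigmoidNeuronRef(x0, w0, x1, w1, w2 float64) float64 {
	z := w0*x0 + w1*x1 + w2
	return math.Pow(1+math.Exp(-z), -1)
}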
// newValueProtoScalar returns a ValueInfoProto describing a named float32
// tensor of shape [1], i.e. a scalar modelled as a one-element tensor.
func newValueProtoScalar(name string) *ir.ValueInfoProto {
	return &ir.ValueInfoProto{
		Name: name,
		Type: &ir.TypeProto{
			Value: &ir.TypeProto_TensorType{
				TensorType: &ir.TypeProto_Tensor{
					ElemType: int32(ir.TensorProto_FLOAT),
					Shape: &ir.TensorShapeProto{
						Dim: []*ir.TensorShapeProto_Dimension{
							{
								Value: &ir.TensorShapeProto_Dimension_DimValue{
									DimValue: int64(1),
								},
							},
						},
					},
				},
			},
		},
	}
}
// init serializes sigmoidNeuron into sigmoidNeuronONNX so the tests can
// exercise decoding from raw ONNX bytes.
func init() {
	model := &ir.ModelProto{
		Graph: sigmoidNeuron,
	}
	var err error
	// sigmoidNeuronONNX, err = model.Marshal()
	sigmoidNeuronONNX, err = proto.Marshal(model)
	if err != nil {
		panic(err)
	}
}
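
// A minimal decoding sketch (not part of the original file), assuming the
// public onnx-go API described in the project README: onnx.NewModel wraps
// a computation backend such as gorgonnx, and Model.UnmarshalBinary
// decodes serialized ONNX bytes like sigmoidNeuronONNX:
//
//	backend := gorgonnx.NewGraph()
//	model := onnx.NewModel(backend)
//	if err := model.UnmarshalBinary(sigmoidNeuronONNX); err != nil {
//		panic(err)
//	}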