-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: test_mlops.mojo
More file actions
145 lines (107 loc) · 3.79 KB
/
test_mlops.mojo
File metadata and controls
145 lines (107 loc) · 3.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
from random import rand
from testing import assert_equal
from test_tensorutils import assert_tensors_equal
import qryptum.nn as nn
from qryptum import Tensor, TensorShape
from qryptum import Graph, Symbol, OP
from qryptum.autograd.ops.mlops import SIGMOID, RELU, TANH
from qryptum.utils.tensorutils import fill
alias dtype = DType.float32
alias nelts: Int = simdwidthof[dtype]()
# ------ Test Unary Ops ------
fn test_unary_op[
    op: OP, t1_shape: TensorShape
](t1: Tensor[dtype], expected: Tensor[dtype]) raises:
    # Generic forward-pass check for a single unary op: build a one-op graph
    # at compile time, run inference on `t1`, and compare against `expected`.
    fn create_graph() -> Graph:
        var g = Graph()
        var inp = g.input(t1_shape)
        var out = g.op(op, inp)
        g.out(out)
        return g ^

    alias graph = create_graph()
    # The graph must contain exactly the single unary op node.
    assert_equal(len(graph.nodes), 1)

    var model = nn.Model[graph](inference_only=True)
    var result = model.inference(t1)[0]
    assert_tensors_equal(result, expected)
fn test_SIGMOID() raises:
    # sigmoid(0) == 0.5 for every element of a zero tensor.
    alias t1_shape = TensorShape(2, 3)
    var input_tensor = Tensor[dtype](t1_shape)  # zero-initialized
    var expected = Tensor[dtype](2, 3)
    fill(expected, 0.5)
    test_unary_op[OP.SIGMOID, t1_shape](input_tensor, expected)
fn test_backward_SIGMOID() raises:
    # Backward pass at x = 0 with an upstream gradient of 5.0:
    # d(sigmoid(0))/dx = sigmoid(0) * (1 - sigmoid(0)) = 0.25.
    alias t1_shape = TensorShape(2, 3)
    alias ug_shape = TensorShape(2, 3)
    var input_tensor = Tensor[dtype](t1_shape)  # zero-initialized
    var upstream_grad = Tensor[dtype](ug_shape)
    fill(upstream_grad, 5.0)

    var expected_grad = Tensor[dtype](2, 3)
    fill(expected_grad, 5.0 * 0.25)

    var grad = SIGMOID.backward[ug_shape, t1_shape](upstream_grad, input_tensor)
    assert_tensors_equal(grad, expected_grad)
fn test_RELU() raises:
    # relu passes positives through and clamps negatives to zero.
    alias t1_shape = TensorShape(2, 3)
    var input_tensor = Tensor[dtype](t1_shape)
    var expected = Tensor[dtype](2, 3)
    # First three elements positive, last three negative.
    # TODO: When tensors can do slices, this could be changed to two fill functions.
    for i in range(6):
        if i < 3:
            input_tensor[i] = 3
            expected[i] = 3
        else:
            input_tensor[i] = -3
            expected[i] = 0
    test_unary_op[OP.RELU, t1_shape](input_tensor, expected)
fn test_backward_RELU() raises:
    # relu gradient is 1 for positive inputs and 0 for negative inputs,
    # scaled by an upstream gradient of 5.0.
    alias t1_shape = TensorShape(2, 3)
    alias ug_shape = TensorShape(2, 3)
    var input_tensor = Tensor[dtype](t1_shape)
    var upstream_grad = Tensor[dtype](ug_shape)
    fill(upstream_grad, 5.0)

    var expected_grad = Tensor[dtype](2, 3)
    for i in range(6):
        if i < 3:
            input_tensor[i] = 3
            expected_grad[i] = 1 * 5.0  # d(relu(3))/dx = 1
        else:
            input_tensor[i] = -3
            expected_grad[i] = 0 * 5.0  # d(relu(-3))/dx = 0

    var grad = RELU.backward[ug_shape, t1_shape](upstream_grad, input_tensor)
    assert_tensors_equal(grad, expected_grad)
fn test_TANH() raises:
    # tanh(0) == 0 for every element of a zero tensor.
    alias t1_shape = TensorShape(2, 3)
    var input_tensor = Tensor[dtype](t1_shape)  # zero-initialized
    var expected = Tensor[dtype](2, 3)
    fill(expected, 0.0)
    test_unary_op[OP.TANH, t1_shape](input_tensor, expected)
fn test_backward_TANH() raises:
    # Backward pass at x = 0 with an upstream gradient of 5.0:
    # d(tanh(0))/dx = 1 - tanh(0)^2 = 1.0.
    alias t1_shape = TensorShape(2, 3)
    alias ug_shape = TensorShape(2, 3)
    var input_tensor = Tensor[dtype](t1_shape)  # zero-initialized
    var upstream_grad = Tensor[dtype](ug_shape)
    fill(upstream_grad, 5.0)

    var expected_grad = Tensor[dtype](2, 3)
    fill(expected_grad, 5.0 * 1.0)

    var grad = TANH.backward[ug_shape, t1_shape](upstream_grad, input_tensor)
    assert_tensors_equal(grad, expected_grad)
fn main():
    # Forward-pass tests; stop immediately if any of them fails.
    try:
        test_SIGMOID()
        test_RELU()
        test_TANH()
    except e:
        print("[ERROR] Error in forward mlops")
        print(e)
        return

    # Backward-pass tests; only reached when all forward tests passed.
    try:
        test_backward_SIGMOID()
        test_backward_RELU()
        test_backward_TANH()
    except e:
        print("[ERROR] Error in backward mlops")
        print(e)
        return