Loss

To judge how well the model separates the two classes, we evaluate the cross-entropy loss over the training data: the negative sum, over all points, of the log-probability assigned to each point's true class. The following code builds this loss on top of the softmax output p and evaluates it for the blue and red points.
import numpy as np

# Create a new graph
Graph().as_default()

# X holds the input points, c holds the corresponding one-hot class labels
X = placeholder()
c = placeholder()

# Weights and bias of the linear layer
W = Variable([
    [1, -1],
    [1, -1]
])
b = Variable([0, 0])

# Class probabilities for each point
p = softmax(add(matmul(X, W), b))

# Cross-entropy loss
J = negative(reduce_sum(reduce_sum(multiply(c, log(p)), axis=1)))

# Evaluate the loss over the blue and red training points
session = Session()
print(session.run(J, {
    X: np.concatenate((blue_points, red_points)),
    c:
        [[1, 0]] * len(blue_points)
        + [[0, 1]] * len(red_points)
}))
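As a sanity check, the same quantity can be reproduced with plain NumPy, outside the graph framework. This is only an illustrative sketch: the function name cross_entropy_numpy and the demo point clouds blue_demo and red_demo are made up here as stand-ins for the blue_points and red_points generated earlier, and it assumes the framework's softmax, matmul, add, multiply, log and reduce_sum behave like their NumPy counterparts.

import numpy as np

def cross_entropy_numpy(X_value, c_value, W_value, b_value):
    # logits = X W + b, one row per training point
    logits = X_value.dot(W_value) + b_value
    # Row-wise softmax (shifting by the row maximum for numerical stability)
    shifted = logits - logits.max(axis=1, keepdims=True)
    p = np.exp(shifted) / np.exp(shifted).sum(axis=1, keepdims=True)
    # J = - sum over points of sum over classes of c * log(p)
    return -np.sum(np.sum(c_value * np.log(p), axis=1))

# Demo point clouds standing in for blue_points / red_points from earlier
blue_demo = np.random.randn(50, 2) - 2
red_demo = np.random.randn(50, 2) + 2

X_value = np.concatenate((blue_demo, red_demo))
c_value = np.array([[1, 0]] * len(blue_demo) + [[0, 1]] * len(red_demo))
W_value = np.array([[1.0, -1.0], [1.0, -1.0]])
b_value = np.array([0.0, 0.0])

print(cross_entropy_numpy(X_value, c_value, W_value, b_value))

Run on the actual blue_points and red_points with the same W and b, this should agree (up to floating-point noise) with the value printed by session.run above.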