import numpy as np
class Sigmoid:
    def forward(self, x, w, b):
        """
        x: (N, D) input features
        w: (D,)   weights
        b: scalar bias
        """
        self.x = x
        self.w = w
        z = np.dot(x, w) + b  # linear score, shape (N,)
        self.y_pred = 1 / (1 + np.exp(-z))
        return self.y_pred

    def backward(self, dy):
        """
        dy: (N,) upstream gradient dL/dy_pred
        """
        # Chain rule through the sigmoid: dsigma/dz = y * (1 - y)
        dz = dy * (1.0 - self.y_pred) * self.y_pred  # (N,)
        # Gradients of the linear part
        dw = np.dot(self.x.T, dz)  # (D,)
        db = np.sum(dz)            # scalar
        return dw, db
class NegativeLogLikelihood:
    def forward(self, y_pred, y_true):
        """
        y_pred: (N,) predicted probabilities
        y_true: (N,) binary labels (0 or 1)
        """
        self.y_pred = y_pred
        self.y_true = y_true
        # Numerical stabilization (avoid log(0))
        eps = 1e-7
        y_pred = np.clip(y_pred, eps, 1 - eps)
        loss = -(y_true * np.log(y_pred) +
                 (1 - y_true) * np.log(1 - y_pred))
        return np.sum(loss)

    def backward(self):
        # Gradient of the summed loss w.r.t. y_pred
        eps = 1e-7
        y_pred = np.clip(self.y_pred, eps, 1 - eps)
        dy = -(self.y_true / y_pred) + (1 - self.y_true) / (1 - y_pred)
        return dy
np.random.seed(0)
# Dummy data
x = np.random.randn(5, 3) # (N=5, D=3)
w = np.random.randn(3)
b = 0.1
y_true = np.array([1, 0, 1, 0, 1])
# Model
sigmoid = Sigmoid()
loss_fn = NegativeLogLikelihood()
# forward
y_pred = sigmoid.forward(x, w, b)
loss = loss_fn.forward(y_pred, y_true)
print("loss:", loss)
# backward
dy = loss_fn.backward()
dw, db = sigmoid.backward(dy)
print("dw:", dw)
print("db:", db)