This is an assignment for a general-education course. Other than lowering lr, I don't know how to reduce the loss value. Please help.
import numpy as np
# Input data (height in cm, weight in kg)
X = np.array([170, 175, 180, 165, 160, 185])
y = np.array([68, 75, 80, 55, 50, 90])
# Initialize the model parameters (w: weight, b: intercept)
w = 0.0
b = 0.0
# Set the learning rate
# If the learning rate is too large, the updates overshoot the lowest point of the loss curve;
# if it is too small, training takes too long, so set it to an appropriate value.
lr = 0.0000001
# Define the prediction function
def predict(x):
    y_pred = w*x + b
    return y_pred
# Define the loss function (mean squared error)
def mse_loss(y_true, y_pred):
    mse = np.mean((y_true - y_pred)**2)
    return mse
# Minimize the loss function with gradient descent
def train(X, y, w, b, lr, epochs):
    for epoch in range(epochs):
        # Compute the predictions
        y_pred = predict(X)
        # Compute the loss
        loss = mse_loss(y, y_pred)
        # Update the weight (w) and intercept (b)
        grad_w = np.mean((y_pred - y)*X)
        grad_b = np.mean(y_pred - y)
        w = w - lr*grad_w
        b = b - lr*grad_b
        # Print progress
        if epoch % 100 == 0:
            print("Epoch %d: loss=%.4f, w=%.4f, b=%.4f" % (epoch, loss, w, b))
    return w, b
# Run training
w, b = train(X, y, w, b, lr, epochs=1000)
(I did see posts saying ChatGPT is good at catching errors like this, though.)
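For reference, in the code as posted, predict() reads the module-level w and b (which stay 0.0), so the w and b updated inside train() never change y_pred and the printed loss cannot move. Below is a minimal sketch of one way to get the loss to actually fall without shrinking lr further: pass w and b into predict() so the updated values are used, and standardize the heights so a normal-sized learning rate converges. The z-score scaling and lr=0.1 here are my own choices, not part of the assignment.

import numpy as np

X = np.array([170, 175, 180, 165, 160, 185], dtype=float)
y = np.array([68, 75, 80, 55, 50, 90], dtype=float)

# Standardize the heights so the gradient magnitudes are moderate and a
# learning rate like 0.1 no longer diverges (z-score scaling is an assumption)
X_mean, X_std = X.mean(), X.std()
X_scaled = (X - X_mean) / X_std

def predict(x, w, b):
    # Use the parameters passed in, not module-level globals
    return w*x + b

def mse_loss(y_true, y_pred):
    return np.mean((y_true - y_pred)**2)

def train(X, y, w, b, lr, epochs):
    for epoch in range(epochs):
        y_pred = predict(X, w, b)            # the current w, b are actually used
        loss = mse_loss(y, y_pred)
        grad_w = np.mean((y_pred - y)*X)     # same gradient form as the original
        grad_b = np.mean(y_pred - y)
        w = w - lr*grad_w
        b = b - lr*grad_b
        if epoch % 100 == 0:
            print("Epoch %d: loss=%.4f, w=%.4f, b=%.4f" % (epoch, loss, w, b))
    return w, b

w, b = train(X_scaled, y, 0.0, 0.0, lr=0.1, epochs=1000)

With this variant the printed loss decreases instead of staying flat. To predict the weight for a new height, scale it the same way first, e.g. predict((172 - X_mean) / X_std, w, b).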