# Desde Tensores hasta transformaciones no lineales, descenso de gradiente y función de pérdida
# (From tensors to non-linear transformations, gradient descent and a loss function)
import os
import sys
module_path = os.path.abspath(os.path.join('..'))
if module_path not in sys.path:
    sys.path.append(module_path)
from lightdlf_old.cpu.core import Tensor
from lightdlf_old.cpu.layers import Linear, Tanh, Sigmoid, Relu, Sequential, MSELoss
from lightdlf_old.cpu.optimizers import SGD
import numpy as np
np.random.seed(0)

# Toy dataset: four 2-bit inputs; the target is the second input bit,
# so the network must learn to copy feature #1 and ignore feature #0.
data = Tensor(np.array([[0, 0], [0, 1], [1, 0], [1, 1]]), autograd=True)  # shape (4, 2)
target = Tensor(np.array([[0], [1], [0], [1]]), autograd=True)            # shape (4, 1)

# Two-layer MLP: 2 -> 3 with tanh, then 3 -> 1 squashed through a sigmoid.
layers = [Linear(2, 3),
          Tanh(),
          Linear(3, 1),
          Sigmoid()]
model = Sequential(layers)
criterion = MSELoss()
# optim = SGD(model.get_parameters(), alpha=0.05) # Lineal
optim = SGD(model.get_parameters(), alpha=1)  # learning rate tuned for the Tanh/Sigmoid net

for epoch in range(10):
    # Forward pass: predict on the full batch.
    prediction = model.forward(data)

    # Compare prediction against the target.
    loss = criterion.forward(prediction, target)

    # Backpropagate (seeding with a gradient of ones, as this framework
    # expects an explicit upstream gradient) and apply one SGD update.
    loss.backward(Tensor(np.ones_like(loss.data)))
    optim.step()

    print(loss)
# Recorded loss per epoch (notebook output):
# [1.06372865]
# [0.75148144]
# [0.57384259]
# [0.39574294]
# [0.2482279]
# [0.15515294]
# [0.10423398]
# [0.07571169]
# [0.05837623]
# [0.04700013]