Share Brilliantly
Neural networks
// Training set: each example pairs a 4x1 column-vector input with the
// expected scalar network output.
let data = [
  { inputs: [[1], [0.5], [0.7], [0]], output: 0.8 },
  { inputs: [[0.5], [0.3], [0.76], [0.2]], output: 0.5 },
];
// Sum of squared errors over the training set for a candidate weight vector.
// Called as loss(...flatWeights); each weight is a separate scalar argument.
// Rest parameters replace the original `[...arguments]` copy — same calling
// convention, idiomatic modern JS.
// NOTE(review): `calculate` and `data` are defined elsewhere on this page;
// this fragment is not self-contained.
let loss = function(...weights){
  let sum = 0;
  for(let item of data){
    let result = calculate(item.inputs, ...weights);
    let diff = result[0][0] - item.output;  // prediction minus expected
    sum += diff * diff;
  }
  return sum;
}
// Load the gradient-descent optimiser (project-local ES module).
let op = await import('/lib/numeric/optimization/v1.0.0/gradient-descent.mjs');
// Minimise `loss` starting from `weights`: constant learning rate 0.01,
// tolerance 1e-7, at most 1e6 iterations.
// NOTE(review): `weights` (and `calculate`, used inside `loss`) are NOT
// declared in this fragment — they only appear in the full example below,
// so this snippet is not runnable on its own. Verify against that example.
let min = op.iterate(loss, ()=>0.01, weights, 0.0000001, 1000000);
// Project-local ES modules: gradient-descent optimiser and linear-algebra
// (matrix) helpers, loaded with top-level await.
let op = await import('/lib/numeric/optimization/v1.0.0/gradient-descent.mjs');
let la = await import('/lib/linear-algebra/v1.0.0/linear-algebra.mjs');
// A single sample input: a 4x1 column vector.
let inputs = [
  [1],
  [0.5],
  [0.7],
  [0],
];

// Training set: each example pairs a 4x1 column-vector input with the
// expected scalar network output.
let data = [
  { inputs: [[1], [0.5], [0.7], [0]], output: 0.8 },
  { inputs: [[0.5], [0.3], [0.76], [0.2]], output: 0.5 },
];

// Initial weights, flattened: the first 8 fill the 2x4 layer-1 matrix,
// the remaining 2 fill the 1x2 layer-2 matrix (see `calculate` below).
let weights = [0.1, 0.5, 0.2, 0.2, 0.8, 0.7, 0.8, 0.2, 0.8, 0.1];
// Logistic sigmoid activation: maps any real x into (0, 1).
// Written as 1/(1+e^(-x)) rather than the original e^x/(e^x+1): the latter
// evaluates to Infinity/Infinity = NaN once Math.exp(x) overflows (x ≳ 710),
// while this form saturates cleanly to 1 (and to 0 for large negative x).
let sigmoid = x => 1 / (1 + Math.exp(-x));
// Apply one network layer: multiply the weight matrix by the input column
// vector, then squash each component through the sigmoid activation.
// Returns a column vector (an array of one-element rows).
let calculateLayer = function(inputs, weights){
  let weighted = la.multiply(weights, inputs);
  let activated = weighted.map(row => [sigmoid(row[0])]);
  return activated;
}
// Forward pass through the full two-layer network.
// Called as calculate(inputs, ...flatWeights): `inputs` is the 4x1 input
// column vector, followed by 10 scalar weights. Rest parameters replace the
// original `[...arguments]` + shift() dance — same calling convention, and
// the weight list is no longer mutated.
// The first 8 weights form the 2x4 layer-1 matrix, the remaining 2 the 1x2
// layer-2 matrix. NOTE(review): assumes la.shape(flat, rows, cols) returns
// [matrix, leftoverFlatWeights], as the original chained usage implies —
// confirm against linear-algebra.mjs.
// Returns a 1x1 column vector: [[output]].
let calculate = function(inputs, ...weights){
  let [weights1, remaining] = la.shape(weights, 2, 4);
  let [weights2] = la.shape(remaining, 1, 2);
  let y1 = calculateLayer(inputs, weights1);
  let y2 = calculateLayer(y1, weights2);
  return y2;
}
// Forward pass on the sample input with the initial weights; yields [[ŷ]].
let result = calculate(inputs, ...weights);
// Sum of squared errors over the training set for a candidate weight vector.
// Called as loss(...flatWeights); each weight is a separate scalar argument,
// which is the calling convention the optimiser's numeric gradient needs.
// Rest parameters replace the original `[...arguments]` copy — same calling
// convention, idiomatic modern JS.
let loss = function(...weights){
  let sum = 0;
  for(let item of data){
    let result = calculate(item.inputs, ...weights);
    let diff = result[0][0] - item.output;  // prediction minus expected
    sum += diff * diff;
  }
  return sum;
}
// Sanity check: the loss at the initial weights.
let test = loss(...weights);
// Gradient-descent search: constant learning rate 0.01, tolerance 1e-7,
// at most 1e6 iterations.
// NOTE(review): presumably `min` is the optimised weight vector — confirm
// against the gradient-descent.mjs module's `iterate` contract.
let min = op.iterate(loss, ()=>0.01, weights, 0.0000001, 1000000);
Try it!