Skip to content

Instantly share code, notes, and snippets.

@lukebelliveau
Last active April 23, 2023 17:31
Show Gist options
  • Select an option

  • Save lukebelliveau/9f700e4a9f0d1b445bff3b085e331210 to your computer and use it in GitHub Desktop.

Select an option

Save lukebelliveau/9f700e4a9f0d1b445bff3b085e331210 to your computer and use it in GitHub Desktop.
// generated by GPT-4
class MLP {
  /**
   * Minimal multi-layer perceptron: one hidden layer, sigmoid activations,
   * trained by stochastic gradient descent with classical momentum.
   *
   * Weight matrices are stored as flat arrays in row-major order:
   *   weightsInputHidden[j * hiddenSize + i]  — input j  -> hidden i
   *   weightsHiddenOutput[j * outputSize + i] — hidden j -> output i
   *
   * @param {number} inputSize    number of input features
   * @param {number} hiddenSize   number of hidden units
   * @param {number} outputSize   number of output units
   * @param {number} learningRate SGD step size
   */
  constructor(inputSize, hiddenSize, outputSize, learningRate) {
    this.inputSize = inputSize;
    this.hiddenSize = hiddenSize;
    this.outputSize = outputSize;
    this.learningRate = learningRate;
    // Weights and biases start uniform in [-1, 1) to break symmetry.
    const rand = () => Math.random() * 2 - 1;
    this.weightsInputHidden = Array.from({ length: inputSize * hiddenSize }, rand);
    this.weightsHiddenOutput = Array.from({ length: hiddenSize * outputSize }, rand);
    this.biasHidden = Array.from({ length: hiddenSize }, rand);
    this.biasOutput = Array.from({ length: outputSize }, rand);
    // Per-parameter momentum accumulators, zero-initialized.
    this.velocityInputHidden = new Array(inputSize * hiddenSize).fill(0);
    this.velocityHiddenOutput = new Array(hiddenSize * outputSize).fill(0);
    this.velocityBiasHidden = new Array(hiddenSize).fill(0);
    this.velocityBiasOutput = new Array(outputSize).fill(0);
    this.momentum = 0.9;
  }

  /** Logistic sigmoid activation. */
  sigmoid(x) {
    return 1 / (1 + Math.exp(-x));
  }

  /**
   * Derivative of the sigmoid expressed in terms of the ACTIVATION value
   * (x is already sigmoid(z)): s'(z) = s(z) * (1 - s(z)).
   */
  sigmoidDerivative(x) {
    return x * (1 - x);
  }

  /**
   * Forward pass through both layers.
   * @param {number[]} inputs length-inputSize feature vector
   * @returns {{hidden: number[], output: number[]}} activations of both layers
   */
  forward(inputs) {
    const hidden = [];
    for (let i = 0; i < this.hiddenSize; i++) {
      let sum = this.biasHidden[i];
      for (let j = 0; j < this.inputSize; j++) {
        sum += inputs[j] * this.weightsInputHidden[j * this.hiddenSize + i];
      }
      hidden[i] = this.sigmoid(sum);
    }
    const output = [];
    for (let i = 0; i < this.outputSize; i++) {
      let sum = this.biasOutput[i];
      for (let j = 0; j < this.hiddenSize; j++) {
        sum += hidden[j] * this.weightsHiddenOutput[j * this.outputSize + i];
      }
      output[i] = this.sigmoid(sum);
    }
    return { hidden, output };
  }

  /**
   * One step of backpropagation with momentum on a single sample.
   *
   * BUG FIX vs. the original: the hidden-layer error selected weights with
   * `j % outputSize === i`, which attributes every hidden->output weight to
   * hidden unit `j % outputSize` instead of `floor(j / outputSize)`. With
   * outputSize === 1 only hidden unit 0 ever received a gradient, so the
   * hidden layer could not learn (fatal for XOR). The error is now routed
   * through the correct weights, and — per standard backprop — scaled by the
   * output activation derivative.
   *
   * @param {number[]} inputs  length-inputSize feature vector
   * @param {number[]} targets length-outputSize target vector
   */
  train(inputs, targets) {
    const { hidden, output } = this.forward(inputs);
    // Output-layer error: target minus prediction.
    const outputErrors = targets.map((target, i) => target - output[i]);
    // Hidden-layer error: delta_out back-propagated through hidden->output
    // weights, where delta_out[k] = outputErrors[k] * sigmoid'(output[k]).
    const hiddenErrors = new Array(this.hiddenSize).fill(0);
    for (let i = 0; i < this.hiddenSize; i++) {
      for (let k = 0; k < this.outputSize; k++) {
        hiddenErrors[i] +=
          this.weightsHiddenOutput[i * this.outputSize + k] *
          outputErrors[k] *
          this.sigmoidDerivative(output[k]);
      }
    }
    // Update hidden->output weights and output biases.
    for (let i = 0; i < this.outputSize; i++) {
      const deltaOut =
        this.learningRate * outputErrors[i] * this.sigmoidDerivative(output[i]);
      for (let j = 0; j < this.hiddenSize; j++) {
        const idx = j * this.outputSize + i;
        this.velocityHiddenOutput[idx] =
          this.momentum * this.velocityHiddenOutput[idx] + deltaOut * hidden[j];
        this.weightsHiddenOutput[idx] += this.velocityHiddenOutput[idx];
      }
      this.velocityBiasOutput[i] =
        this.momentum * this.velocityBiasOutput[i] + deltaOut;
      this.biasOutput[i] += this.velocityBiasOutput[i];
    }
    // Update input->hidden weights and hidden biases. hiddenErrors already
    // carries the output derivative; only the hidden activation derivative
    // is applied here.
    for (let i = 0; i < this.hiddenSize; i++) {
      const deltaHid =
        this.learningRate * hiddenErrors[i] * this.sigmoidDerivative(hidden[i]);
      for (let j = 0; j < this.inputSize; j++) {
        const idx = j * this.hiddenSize + i;
        this.velocityInputHidden[idx] =
          this.momentum * this.velocityInputHidden[idx] + deltaHid * inputs[j];
        this.weightsInputHidden[idx] += this.velocityInputHidden[idx];
      }
      this.velocityBiasHidden[i] =
        this.momentum * this.velocityBiasHidden[i] + deltaHid;
      this.biasHidden[i] += this.velocityBiasHidden[i];
    }
  }

  /**
   * Forward pass followed by a 0.5 threshold on each output.
   * @param {number[]} inputs length-inputSize feature vector
   * @returns {number[]} binary prediction (0 or 1) per output unit
   */
  predict(inputs) {
    const { output } = this.forward(inputs);
    return output.map((o) => (o >= 0.5 ? 1 : 0));
  }
}
// --- Demo: train the MLP on a two-input logic gate -------------------------
// Change `logicGate` to any key of `trainingData` to learn a different gate.
const logicGate = "XOR";

// Truth tables for the supported two-input gates.
const trainingData = {
  AND: [
    { inputs: [0, 0], output: [0] },
    { inputs: [0, 1], output: [0] },
    { inputs: [1, 0], output: [0] },
    { inputs: [1, 1], output: [1] },
  ],
  OR: [
    { inputs: [0, 0], output: [0] },
    { inputs: [0, 1], output: [1] },
    { inputs: [1, 0], output: [1] },
    { inputs: [1, 1], output: [1] },
  ],
  NAND: [
    { inputs: [0, 0], output: [1] },
    { inputs: [0, 1], output: [1] },
    { inputs: [1, 0], output: [1] },
    { inputs: [1, 1], output: [0] },
  ],
  NOR: [
    { inputs: [0, 0], output: [1] },
    { inputs: [0, 1], output: [0] },
    { inputs: [1, 0], output: [0] },
    { inputs: [1, 1], output: [0] },
  ],
  XOR: [
    { inputs: [0, 0], output: [0] },
    { inputs: [0, 1], output: [1] },
    { inputs: [1, 0], output: [1] },
    { inputs: [1, 1], output: [0] },
  ],
};

// Network hyperparameters. XOR is not linearly separable, so the hidden
// layer needs at least a couple of units; 6 gives some slack.
const inputSize = 2;
const hiddenSize = 6;
const outputSize = 1;
const learningRate = 0.3;
const mlp = new MLP(inputSize, hiddenSize, outputSize, learningRate);

// Train on uniformly random samples from the chosen truth table. Drawing by
// `samples.length` (instead of a hard-coded 4) stays correct if a table ever
// changes size.
const samples = trainingData[logicGate];
for (let i = 0; i < 5000000; i++) {
  const { inputs, output } = samples[Math.floor(Math.random() * samples.length)];
  mlp.train(inputs, output);
}

// Evaluate the trained network on all four input combinations.
console.log("Learned " + logicGate + " gate:");
console.log("0, 0 ->", mlp.predict([0, 0]));
console.log("0, 1 ->", mlp.predict([0, 1]));
console.log("1, 0 ->", mlp.predict([1, 0]));
console.log("1, 1 ->", mlp.predict([1, 1]));
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment