I wrote this in JS, which is roughly what I explained:
// ---
// | |
// --- \
//      \
// --- \ ---
// | |---| |
// --- / ---
//      /
// --- /
// | |
// ---
//
// Neural Network Library
// Because I'm bored
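// Sigmoid activation: squashes any real number into the range (0, 1).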
function sigmoid(z) {
return 1 / (1 + Math.exp(-z));
}
class Nueron {
constructor(previousNuerons) {
// one small random starting weight per nueron in the previous layer
var weights = []
for (var i = 0; i < previousNuerons; i++) {
weights.push((Math.random()*2)-1)
}
this.weights = weights
this.bias = 0
}
updateBias(bias) {
this.bias = bias
}
setInput(input) {
this.activiation = input
}
updateWeights(weights) {
this.weights = weights
}
getWeights() {
return this.weights
}
computeAcitivation(previousNuerons) {
var weights = this.weights
var activiation = 0
previousNuerons.forEach(function(nueron,index) {
activiation = activiation + (nueron.activiation*weights[index])
})
this.withoutSigmoid = (activiation-this.bias)
activiation = sigmoid(activiation-this.bias)
this.activiation = activiation
return activiation
}
}
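// Softmax: turns an array of raw activations into a probability distribution that sums to 1.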
function softmax(arrayActiavtion) {
var activationThing = []
var activationSum = 0
var activiations = []
arrayActiavtion.forEach(function(activated) {
activationThing.push(Math.exp(activated))
activationSum = activationSum + Math.exp(activated)
})
activationThing.forEach(function(item,index) {
activiations.push((item/activationSum))
})
return activiations
}
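// Fully connected feed-forward network. nueralSturcture is an array of layer sizes,
// e.g. [5,16,16,10]; useSoftmax applies softmax to the final layer's activations.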
class NueralNetwork {
constructor(nueralSturcture, useSoftmax) {
var nueralThing = []
nueralSturcture.forEach(function(amount,index) {
nueralThing.push([])
var previousAmount = 0
if (index > 0) {
previousAmount = nueralSturcture[(index-1)]
}
for (var i = 0; i < amount; i++) {
nueralThing[(nueralThing.length-1)].push(new Nueron(previousAmount))
}
})
this.useSoftmax = useSoftmax
this.structure = nueralThing
}
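// Forward pass. Returns [final activations, per-layer activations, per-layer pre-sigmoid values (z)].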
runNetwork(input) {
var activiations = []
var previousValue = []
var totalAcitivations = []
var withoutSigmoid = []
var useSoftmax = this.useSoftmax
var structure = this.structure
this.structure.forEach(function(thing,index) {
totalAcitivations.push([])
withoutSigmoid.push([])
thing.forEach(function(nueron,index2) {
if (index == 0) {
nueron.setInput(input[index2])
totalAcitivations[index].push(input[index2])
withoutSigmoid[index].push(input[index2])
} else {
nueron.computeAcitivation(previousValue)
totalAcitivations[index].push(nueron.activiation)
withoutSigmoid[index].push(nueron.withoutSigmoid)
}
})
if (index == (structure.length-1)) {
if (useSoftmax == true) {
var activationThing = []
thing.forEach(function(activated) {
activationThing.push(activated.activiation)
})
activiations = softmax(activationThing)
} else {
thing.forEach(function(nueron) {
activiations.push(nueron.activiation)
})
}
} else {
previousValue = thing
}
})
return [activiations, totalAcitivations, withoutSigmoid]
}
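// Collects every layer's weight arrays and biases (the input layer is skipped, since it has neither).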
exportWeightsAndBiases() {
var weights = []
var biases = []
this.structure.forEach(function(layer,index) {
// skip the input layer: its nuerons have no weights or biases to export
if (index > 0) {
weights.push([])
biases.push([])
layer.forEach(function(nueron) {
weights[(weights.length-1)].push(nueron.weights)
biases[(biases.length-1)].push(nueron.bias)
})
}
})
return [weights,biases]
}
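// Batch gradient descent with backpropagation. dataSet is an array of [input, target] pairs;
// every epoch runs the whole set, accumulates the updates, then applies them averaged over the set
// (learning rate 0.1 for weights, 1 for biases).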
learn(dataSet, epochs) {
for (var epochIndex = 0; epochIndex < epochs; epochIndex++) {
var weightUpdates = []
var biasUpdates = []
// one accumulator per non-input layer; bias accumulators start at zero
this.structure.forEach(function(layer,index) {
if (index > 0) {
weightUpdates.push([])
biasUpdates.push([])
layer.forEach(function() {
biasUpdates[(index-1)].push(0)
})
}
})
var totalAverageCost = 0
for (var dataSetIndex = 0; dataSetIndex < dataSet.length; dataSetIndex++) {
var output = this.runNetwork(dataSet[dataSetIndex][0])
output[0].forEach(function(predicted,index) {
totalAverageCost = totalAverageCost + (((dataSet[dataSetIndex][1][index]-predicted)*(dataSet[dataSetIndex][1][index]-predicted)))
})
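// Output-layer delta: 2*(target - predicted)*sigmoid'(z), i.e. the NEGATIVE of dCost/dz.
// Because z = sum(w*a) - bias, the accumulated bias term is the true bias gradient (subtracted
// in the update step), while the accumulated weight term is the negative weight gradient (added there).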
var lastLayerDerivative = []
if (this.useSoftmax != true) {
output[0].forEach(function(predicted,index) {
lastLayerDerivative.push((2*(dataSet[dataSetIndex][1][index]-predicted))*(sigmoid(output[2][(output[2].length-1)][index])*(1-sigmoid(output[2][(output[2].length-1)][index]))))
})
} else {
var softMaxArray = softmax(output[2][(output[2].length-1)])
output[0].forEach(function(predicted,index) {
lastLayerDerivative.push((2*(dataSet[dataSetIndex][1][index]-predicted))*(softMaxArray[index]*(1-softMaxArray[index])))
})
}
var nueralSturucture = this.structure
nueralSturucture[(nueralSturucture.length-1)].forEach(function(nueron,index2) {
var weightsNew = []
nueron.weights.forEach(function(weight,index) {
var gradientTerm = (output[1][(output[1].length-2)][index]*lastLayerDerivative[index2])
weightsNew.push(gradientTerm)
})
//console.log('Layer ' + (nueralSturucture.length) + ' Nueron ' + (index2+1) + ' bias: ' + lastLayerDerivative[index2])
if (weightUpdates[(weightUpdates.length-1)][index2] == undefined) {
weightUpdates[(weightUpdates.length-1)].push(weightsNew)
} else {
var newWeights = []
weightUpdates[(weightUpdates.length-1)][index2].forEach(function(weightUpdate,index) {
newWeights.push((weightUpdate+weightsNew[index]))
})
weightUpdates[(weightUpdates.length-1)][index2] = newWeights
}
biasUpdates[(biasUpdates.length-1)][index2] = biasUpdates[(biasUpdates.length-1)][index2] + lastLayerDerivative[index2]
})
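// Backpropagate through the hidden layers: each nueron's delta is sigmoid'(z) times the
// sum over the next layer of (connecting weight * next-layer delta).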
for (var i = (nueralSturucture.length-2); i > 0; i--) {
var layer = output[2][i]
var weightsOrganizedByOrder = []
nueralSturucture[(i+1)].forEach(function(nueron,index2) {
nueron.weights.forEach(function(weight,index) {
if (index2 == 0) {
weightsOrganizedByOrder.push([])
}
weightsOrganizedByOrder[index].push(weight)
})
})
var averageWeights = []
weightsOrganizedByOrder.forEach(function(weightArray,index2) {
var amount = 0
weightArray.forEach(function(weight,index) {
amount = amount + (weight*lastLayerDerivative[index])
})
averageWeights.push(amount)
})
var layerDeriviative = []
layer.forEach(function(data,index) {
layerDeriviative.push((sigmoid(data)*(1-sigmoid(data)))*averageWeights[index])
})
lastLayerDerivative = layerDeriviative
nueralSturucture[i].forEach(function(nueron,index2) {
var weightsNew = []
nueron.weights.forEach(function(weight,index) {
var gradientTerm = (output[1][i-1][index]*lastLayerDerivative[index2])
weightsNew.push(gradientTerm)
})
//console.log('Layer ' + (i+1) + ' Nueron ' + (index2+1) + ' bias: ' + lastLayerDerivative[index2])
if (weightUpdates[(i-1)][index2] == undefined) {
weightUpdates[(i-1)].push(weightsNew)
} else {
var newWeights = []
weightUpdates[(i-1)][index2].forEach(function(weightUpdate,index) {
newWeights.push((weightUpdate+weightsNew[index]))
})
weightUpdates[(i-1)][index2] = newWeights
}
biasUpdates[(i-1)][index2] = biasUpdates[(i-1)][index2] + lastLayerDerivative[index2]
})
}
}
this.structure.forEach(function(layer,index) {
if (index > 0) {
layer.forEach(function(nueron,index2) {
var newWeights = []
weightUpdates[(index-1)][index2].forEach(function(weightDerivative, index) {
// weightDerivative is the accumulated NEGATIVE gradient (see the delta above), so add it to descend the cost
newWeights.push((nueron.weights[index]+(weightDerivative*(0.1/dataSet.length))))
})
nueron.updateWeights(newWeights)
nueron.updateBias((nueron.bias-(biasUpdates[(index-1)][index2]*(1/dataSet.length))))
})
}
})
console.log('epoch: ' + epochIndex + '; cost: ' + totalAverageCost + '; average cost: ' + (totalAverageCost/dataSet.length))
}
}
}
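// Demo: train on two hand-made examples and dump the weights/biases to JSON before and after training.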
var fs = require('fs')
function trainModel() {
var NN = new NueralNetwork([5,16,16,10], false)
var exported = NN.exportWeightsAndBiases()
fs.writeFileSync('./weights2.json', JSON.stringify({data: exported[0]}))
fs.writeFileSync('./biases2.json', JSON.stringify({data: exported[1]}))
var dataSet = [[[0,0,1,0,1],[0,0,0,0,0,0,0,0,1,0]],[[1,0,0,0,1],[0,0,1,0,0,0,0,0,0,0]]]
NN.learn(dataSet, 100)
console.log(NN.runNetwork(dataSet[0][0])[0])
var exported2 = NN.exportWeightsAndBiases()
fs.writeFileSync('./weights.json', JSON.stringify({data: exported2[0]}))
fs.writeFileSync('./biases.json', JSON.stringify({data: exported2[1]}))
}
trainModel()