# neural network v2, 3 years later
import numpy as np
def neuron(weights, inputs, bias):
    # Weighted sum of a unit's inputs plus its bias (the pre-activation).
    return np.dot(np.array(weights), np.array(inputs)) + bias
def relu(x):
    # Activation: a squared variant of leaky ReLU.
    # Positive inputs are squared; negative inputs are scaled by 1/64.
    if x > 0:
        return x ** 2
    else:
        return 0.015625 * x
def reluderiv(x):
    # Derivative of relu(): 2x on the positive branch, 1/64 on the negative branch.
    if x > 0:
        return x * 2
    else:
        return 0.015625
# Network layout: each entry of connections is a layer, and each neuron within a
# layer is meant to hold [weights, bias]. The placeholders are left unfilled here.
connections = [[[], []], [[], [], [], [], [], [], [], [], [], []]]
# Training data: each sample is [inputs, expected_outputs].
traindata = [[[], []], [[], []]]
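# A hypothetical example (an assumption, not part of the original script) of how
# the placeholders above could be filled: random weights, zero biases, and a
# made-up two-input training set. The shapes are chosen only so the loop below
# has something to run on: 2 hidden neurons with 2 inputs each, 10 output neurons.
# connections = [
#     [[list(np.random.randn(2)), 0.0] for _ in range(2)],
#     [[list(np.random.randn(2)), 0.0] for _ in range(10)],
# ]
# traindata = [
#     [[0.0, 1.0], [1.0] * 10],
#     [[1.0, 0.0], [0.0] * 10],
# ]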
pastlayers = []  # every layer's activations, saved for a later backward pass
for u in traindata:
    layer = u[0]  # start from the sample's inputs
    for i in connections:
        last = layer
        layer = []
        for k in i:
            # k[0] holds the neuron's weights, k[1] its bias
            layer.append(relu(neuron(k[0], last, float(k[1]))))
        pastlayers.append(layer)
    layerarr = np.array(layer)
    trainarr = np.array(u[1])
    # Total absolute error between the network's output and the target.
    totalerror = np.sum(np.abs(layerarr - trainarr))
    # Halved squared error, so its derivative w.r.t. each output is (output - target).
    totalerrorsquared = np.sum(np.square(layerarr - trainarr)) / 2
    for idx, k in enumerate(layer):
        # dE/d(output) for this output neuron under the halved squared error.
        errorderiv = k - u[1][idx]
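# A minimal sketch (an assumption, not shown in the original script) of how the
# pieces above could drive a gradient step for the output layer. It assumes the
# pre-activations z_j of that layer were also recorded during the forward pass,
# which the loop above does not do. For the halved squared error,
# dE/dw_ij = (output_j - target_j) * reluderiv(z_j) * prev_i.
def output_layer_step(layer_conns, prev, preacts, outputs, targets, lr=0.01):
    # layer_conns: [weights, bias] pairs for one layer (hypothetical layout)
    # prev: activations feeding this layer; preacts: z_j; outputs/targets: a_j / t_j
    for j, (weights, bias) in enumerate(layer_conns):
        delta = (outputs[j] - targets[j]) * reluderiv(preacts[j])  # dE/dz_j
        for i in range(len(weights)):
            weights[i] -= lr * delta * prev[i]  # dE/dw_ij = delta * prev_i
        layer_conns[j][1] = bias - lr * delta   # bias gradient is just delta
    return layer_conns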