import math
import random
import sys

import numpy
# --- Hyperparameters ---
eta = 0.0001  # learning rate
n = 200       # number of training epochs
inp = 30      # input layer size
m = 60        # hidden layer size

# --- Scalars shared across the training loop ---
y = 0       # output signal
t = 0       # target signal
e = 0       # error
d_y = 0     # local gradient for the output neuron
err = 0     # per-sample network error for the output neuron
err_av = 0  # accumulated network error

# --- Data-file locations and forecast horizon ---
path = 'dataTrain.txt'    # training sample
path2 = 'dataLaunch.txt'  # input for launching a forecast
day = 365                 # number of days of forecasting

# --- Network state and collected results ---
x = []                              # training sample rows
w = [[0] * inp for _ in range(m)]   # input->hidden weight matrix (synapses)
v = [0] * m                         # hidden->output weight vector
y_s = [0] * m                       # hidden layer activations
d_y_s = [0] * m                     # local gradients for hidden layer neurons
averError = []                      # network errors collected by epoch
window = []                         # sliding window of recent values
predict = []                        # forecast output values
fact = []                           # raw target values
training = []                       # outputs produced during the final epoch
y_s2 = [0] * m                      # hidden layer activations used in prediction

# --- Normalization range [a, b] ---
a = -1
b = 1
def sigmoid(arg):
    """Shifted logistic activation 1 / (1 + e^(1-arg)); equals 0.5 at arg == 1."""
    return 1.0 / (1 + numpy.exp(1 - arg))
def tanh(arg):
    """Hyperbolic tangent computed from exponentials: (e^x - e^-x) / (e^x + e^-x)."""
    pos = numpy.exp(arg)
    neg = numpy.exp(-arg)
    return (pos - neg) / (pos + neg)
def tanh_der(arg):
    """Derivative of the hyperbolic tangent: 1 - tanh(x)^2."""
    value = tanh(arg)
    return 1 - numpy.power(value, 2)
def ReLu(arg):
    """Leaky ReLU: identity for positive inputs, slope 0.00001 otherwise."""
    return arg if arg > 0 else 0.00001 * arg
def ReLu_der(arg):
    """Derivative of the leaky ReLU: 1 for positive inputs, 0.00001 otherwise."""
    if arg > 0:
        return 1
    return 0.00001
def init_x(path):
    """Initialize the network weights randomly and load/normalize the training set.

    Side effects on module globals:
      - w, v are filled with uniform random weights in [-1, 1);
      - x is extended with one row per line of *path*; each row is normalized
        to [a, b] and then prefixed with [xmin, xmax] so it can be
        denormalized later (data ends up at indices 2..31, target at 32);
      - fact collects the raw (pre-normalization) target of each row;
      - the rows of x are shuffled at the end.

    Prints a message and exits the program if *path* does not exist.
    """
    # Random weight initialization in [-1, 1).
    for i in range(m):
        for j in range(inp):
            w[i][j] = random.random() * 2 - 1
    for j in range(m):
        v[j] = random.random() * 2 - 1

    try:
        f = open(path, 'r')
    except FileNotFoundError:
        print("File not found")
        sys.exit()
    # Context manager guarantees the handle is closed even if parsing raises.
    with f:
        for line in f:
            x.append([float(value) for value in line.split()])

    for i in range(len(x)):
        fact.append(x[i][30])  # raw target value (31st column of the row)
        xmax = max(x[i])
        xmin = min(x[i])
        # Normalize the whole row (inputs + target) to [a, b].
        # NOTE(review): assumes xmax != xmin; a constant row would divide by zero.
        for j in range(len(x[i])):
            x[i][j] = a + (x[i][j] - xmin) / (xmax - xmin) * (b - a)
        # Store the normalization bounds at the front: [xmin, xmax, data...].
        x[i].insert(0, xmax)
        x[i].insert(0, xmin)
    random.shuffle(x)
def train():
    """Train the inp-m-1 network for n epochs with online gradient descent.

    Reads the module globals x (rows produced by init_x), w, v, y_s, d_y_s,
    eta, m, inp, n, a, b; mutates w and v in place.  During the final epoch
    the denormalized output for every sample is appended to `training`, and
    after every epoch one error value is appended to `averError`.
    """
    for k in range(1, n+1): #cycle by epoch
        err_av=0  # accumulated squared error over this epoch's samples
        for f in range(0, len(x)): #cycle by samples
            # Forward pass.  Row layout from init_x:
            # x[f][0]=xmin, x[f][1]=xmax, x[f][2:32]=inputs, x[f][32]=target.
            for i in range(0, m):
                sum=0
                for j in range(0, inp):
                    sum=sum+w[i][j]*x[f][j+2] #weighted sum of inputs
                y_s[i]=sum
            for p in range(0, m):
                y_s[p]=sigmoid(y_s[p]) #The activation signal is converted using the activation function
            #calculating output neuron activity
            sum2=0
            for z in range(0, m):
                sum2=sum2+v[z]*y_s[z]
            y=sigmoid(sum2) #end of propagation. y - output
            t=x[f][32] #target value
            e=y-t
            err=pow(e, 2)/2  # squared-error loss for this sample
            err_av=err_av+err
            if k == n:
                # Final epoch only: denormalize the output with the row's stored bounds.
                training.append(((y-a)/(b-a))*(x[f][1]-x[f][0])+x[f][0])
            d_y=e*y*(1-y)  # output-neuron local gradient (logistic derivative y*(1-y))
            #modification of the output layer weights
            for q in range(0, m):
                v[q]=v[q]-eta*d_y*y_s[q]
            # NOTE(review): hidden gradients are computed from the already-updated v,
            # not the pre-update weights used in the forward pass — confirm intended.
            for r in range(0, m):
                d_y_s[r]=d_y*v[r]
            # Hidden-layer weight update (logistic derivative y_s*(1-y_s)).
            for i1 in range(0, m):
                for j1 in range(0, inp):
                    w[i1][j1]=w[i1][j1]-eta*d_y_s[i1]*y_s[i1]*(1-y_s[i1])*x[f][j1+2]
        # NOTE(review): divides by both n (epoch count) and f+1 (sample count);
        # a per-epoch mean would be err_av/(f+1) only — confirm the metric's definition.
        averError.append(math.sqrt(err_av/n/(f+1)))
def prediction(window, predict, day):
    """Roll the trained network forward *day* steps using a sliding window.

    window  -- list of the inp most recent raw values; mutated in place
               (normalized, shifted by one, then denormalized each step)
    predict -- output list; receives the initial window followed by one
               rounded forecast value per day
    day     -- number of forecast steps to produce

    Uses the module globals w, v, y_s2, m, inp, a, b prepared by init_x/train.
    """
    predict.extend(window)
    for z in range(1, day+1):
        # Normalize the current window contents to [a, b].
        xmax=max(window)
        xmin=min(window)
        for t in range(0, len(window)):
            window[t]=a+((window[t]-xmin)/(xmax-xmin))*(b-a)
        # Forward pass: hidden-layer weighted sums over the normalized window.
        for i in range(0, m):
            sum=0
            for j in range(0, inp):
                sum=sum+w[i][j]*window[j]
            y_s2[i]=sum
        #calculating the activity of hidden layer neurons
        for i in range(0, m):
            y_s2[i]=sigmoid(y_s2[i])
        #calculating output neuron activity
        sum=0
        for j in range(0, m):
            sum=sum+v[j]*y_s2[j]
        t=sigmoid(sum)  # network output for this step (still normalized)
        # Slide the window: append the new (normalized) prediction, drop the oldest.
        window.append(t)
        window.pop(0)
        predict.append(round(((t-a)/(b-a))*(xmax-xmin)+xmin, 2)) #denormalization of the output
        for i in range(0, len(window)):
            window[i]=((window[i]-a)/(b-a))*(xmax-xmin)+xmin #denormalization of the entire sliding window
# --- Script entry: load the forecast seed window, then train and predict ---
try:
    f=open(path2,'r')
except FileNotFoundError:
    print("File not found")
    sys.exit()
# Context manager guarantees the handle is closed even if parsing raises.
with f:
    # First line: seed values for the sliding window.
    entrance=f.readline()
    window.extend([float(value) for value in entrance.split()])
    # Second line: read and stripped; not used below but kept as a module global.
    data=f.readline()
    data=data.strip()

init_x(path)
train()
prediction(window, predict, day)