import plotly
import plotly.graph_objs as go
plotly.offline.init_notebook_mode(connected=True)
import numpy as np
from liback import *
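# The helpers used below come from liback (the author's own routines); their exact
# implementations are not shown here, and the roles assumed from the call sites are:
#   softmax(v)                     - standard exp-normalisation of a vector (assumed)
#   ReLU(v), DReLU(v)              - elementwise ReLU and its derivative (assumed)
#   MAT_init(rows, cols)           - random initialisation of a rows x cols matrix (assumed)
#   LayerForward(x, A, b, act)     - affine map followed by the activation (assumed)
#   LastLayerSoftmaxForward(...)   - the same with softmax as the activation (assumed)
#   LayerUpdate, LastLayerSoftmaxUpdate     - gradient-descent increments for (A, b) (assumed)
#   LayerBackward, LastLayerSoftmaxBackward - Jacobians used to chain gradients backwards (assumed)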
def NLclassifier(x,y):
    out = 0
    if ((x**2 + 2*y**2 > 1) and (x**2 + 2*y**2 < 3)):
        out = 1
    if (x**2 + 2*y**2 >= 3):
        out = 2
    return out
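# NLclassifier splits the plane with the ellipses x**2 + 2*y**2 = 1 and x**2 + 2*y**2 = 3:
# class 0 inside the inner ellipse, class 1 in the elliptical annulus between them,
# class 2 outside the outer ellipse.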
lower = -2.5
upper = -lower
train_npts = 200000
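# draw train_npts points uniformly from the square [-2.5, 2.5]^2 and append the class
# of each point as a third column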
data_train = np.random.uniform(lower , upper , 2 * train_npts).reshape(train_npts , 2)
outcolor = np.array(list(map(lambda x , y : NLclassifier(x,y) , data_train[:,0], data_train[:,1]))).reshape(train_npts,1)
data_train = np.append(data_train,outcolor,axis = 1)
# dictionary mapping each class label to a one-hot vector; softmax of that vector is used as the training target
classtovector = {0.0:np.array([1.0,0,0]), 1.0 : np.array([0,1.0,0]) , 2.0 :np.array([0,0,1.0])}
# our vectors are always column matrices
softmax_ff = np.transpose(np.array(list(map(lambda x : softmax(classtovector[x]) , data_train[:,2]))))
print(softmax_ff.shape)
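# Note: applying softmax to a one-hot vector gives a smoothed target rather than a hard one;
# assuming the standard exp-normalisation, softmax([1, 0, 0]) is roughly [0.58, 0.21, 0.21].
# softmax_ff has shape (3, train_npts): one target column per training point.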
class0 = data_train[data_train[:,2]==0]
class1 = data_train[data_train[:,2]==1]
class2 = data_train[data_train[:,2]==2]
# for the scatter plot we only show 1/part_of_pts (here one hundredth) of the points in each class
part_of_pts = 100
plot_pts0 = int(class0.shape[0]/part_of_pts)
plot_pts1 = int(class1.shape[0]/part_of_pts)
plot_pts2 = int(class2.shape[0]/part_of_pts)
x0 = class0[0:plot_pts0,0]
y0 = class0[0:plot_pts0,1]
trace0 = go.Scatter(
    x = x0,
    y = y0,
    name = 'Class 0',
    mode = 'markers',
    marker = dict(
        size = 5,
        color = 'rgba(203, 119, 111, .8)',
        line = dict(
            width = 1,
            color = 'rgb(0, 0, 0)'
        )
    )
)
x1 = class1[0:plot_pts1,0]
y1 = class1[0:plot_pts1,1]
trace1 = go.Scatter(
    x = x1,
    y = y1,
    name = 'Class 1',
    mode = 'markers',
    marker = dict(
        size = 5,
        color = 'rgba(209, 207, 250, .8)',
        line = dict(
            width = 1,
            color = 'rgb(0, 0, 0)'
        )
    )
)
x2 = class2[0:plot_pts2,0]
y2 = class2[0:plot_pts2,1]
trace2 = go.Scatter(
    x = x2,
    y = y2,
    name = 'Class 2',
    mode = 'markers',
    marker = dict(
        size = 5,
        line = dict(
            width = 1,
            color = 'rgb(0, 0, 0)'
        )
    )
)
plotdata_train = [trace0,trace1,trace2]
plotly.offline.iplot(plotdata_train)
# the parameters of the ANN
# number of epochs
nepchs = 70
# number of hidden layers
nhls = 2
# dimension of the input
input_dim = 2
# dimension of the hidden layer 1
hid1_dim = 20
# dimension of the hidden layer 2
hid2_dim = 20
# dimension of the output layer (number of classes)
out_dim = 3
# initial learning rate (decayed slightly at every epoch)
learn_rate = 0.00001
# initialize all the weight matrices and bias vectors of the ANN
matA01 = MAT_init(hid1_dim, input_dim)
matb01 = MAT_init(hid1_dim , 1)
matA12 = MAT_init(hid2_dim , hid1_dim)
matb12 = MAT_init(hid2_dim , 1)
matAout = MAT_init(out_dim , hid2_dim)
matbout = MAT_init(out_dim , 1)
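# The network is 2 -> 20 -> 20 -> 3: two ReLU hidden layers and a softmax output layer,
# for a total of 20*2 + 20 + 20*20 + 20 + 3*20 + 3 = 543 trainable parameters.
# The training loop below assumes the usual affine-layer convention for the liback helpers
# (middle = A @ x + b, out = activation(middle)) and that the *Update functions return
# increments already scaled by the learning rate, since they are added directly to A and b.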
for j in range(nepchs):
    learn_rate *= 0.9999
    for i in range(train_npts):
        #####################################################################
        # input
        inpt = np.array([data_train[i,0],data_train[i,1]]).reshape(2,1)
        # forward pass through the network
        middle01 , out01 = LayerForward(inpt , matA01 , matb01 , ReLU)
        middle12 , out12 = LayerForward(out01 , matA12 , matb12 , ReLU)
        # the last layer with softmax
        middleout , outout = LastLayerSoftmaxForward(out12 , matAout , matbout)
        #####################################################################
        #####################################################################
        # Backprop ##########################################################
        #####################################################################
        # target: the softmax-encoded class of this training point
        classff = softmax_ff[:,i].reshape(out_dim,1)
        deltaAout , deltabout = LastLayerSoftmaxUpdate(out12 , outout , matAout , matbout , classff , learn_rate)
        matAout += deltaAout
        matbout += deltabout
        # chain rule: propagate the gradient from the last layer back to the second hidden layer
        TMAT = LastLayerSoftmaxBackward(outout , matAout , classff)
        deltaA12 , deltab12 = LayerUpdate(out01 , middle12 , out12 , matA12 , matb12 , TMAT , DReLU , learn_rate)
        matA12 += deltaA12
        matb12 += deltab12
        # ... and back to the first hidden layer
        TMAT = np.matmul(TMAT , LayerBackward(middle12 , out12 , matA12, DReLU))
        deltaA01 , deltab01 = LayerUpdate(inpt , middle01 , out01 , matA01 , matb01 , TMAT , DReLU , learn_rate)
        matA01 += deltaA01
        matb01 += deltab01
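# evaluate the trained network on a fresh, independently drawn test set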
test_npts = 1000
data_test = np.random.uniform(lower , upper , 2*test_npts).reshape(test_npts,2)
outcolor_teste = np.array(
    list(map(lambda x , y : NLclassifier(x,y) , data_test[:,0], data_test[:,1]))).reshape(test_npts,1)
data_test = np.append(data_test,outcolor_teste,axis = 1)
# our vectors are always column matrices
ff_test = np.transpose(np.array(list(map(lambda x : softmax(classtovector[x]) , data_test[:,2]))))
print(ff_test.shape)
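# ff_test has shape (3, test_npts): one softmax-encoded target column per test point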
classificador_test = np.zeros(test_npts)  # an entry of 1.0 marks a misclassified test point
for i in range(test_npts):
    # input
    inpt = np.array([data_test[i,0],data_test[i,1]]).reshape(2,1)
    # forward pass with the trained weights
    middle01 , out01 = LayerForward(inpt , matA01 , matb01 , ReLU)
    middle12 , out12 = LayerForward(out01 , matA12 , matb12 , ReLU)
    middleout , outout = LastLayerSoftmaxForward(out12 , matAout , matbout)
    # flag the point as misclassified if the predicted class differs from the true class
    if (np.argmax(outout) != np.argmax(ff_test[:,i]) ) : classificador_test[i] = 1.0
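# fraction of misclassified test points (test error rate)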
print(classificador_test.sum()/classificador_test.size)
testcl0 = data_test[classificador_test[:] == 0.0,:]  # correctly classified test points
print(testcl0.shape)
testcl1 = data_test[classificador_test[:] == 1.0,:]  # misclassified test points
print(testcl1.shape)
xt0 = testcl0[:,0]
yt0 = testcl0[:,1]
test_trace0 = go.Scatter(
    x = xt0,
    y = yt0,
    name = 'Correct Classification',
    mode = 'markers',
    marker = dict(
        size = 5,
        color = 'rgba(203, 119, 111, .8)',
        line = dict(
            width = 1,
            color = 'rgb(0, 0, 0)'
        )
    )
)
xt1 = testcl1[:,0]
yt1 = testcl1[:,1]
test_trace1 = go.Scatter(
    x = xt1,
    y = yt1,
    name = 'Incorrect Classification',
    mode = 'markers',
    marker = dict(
        size = 5,
        color = 'rgba(111, 156, 203, .8)',
        line = dict(
            width = 1,
            color = 'rgb(0, 0, 0)'
        )
    )
)
plot_test=[test_trace0,test_trace1]
plotly.offline.iplot(plot_test)