Load the following data that diagnoses normal and abnormal (two classes) behavior of the heart

  

Load the following data that diagnoses normal and abnormal (two classes) behavior of the heart, given a set of integer features collected by a SPECT machine. Train with the SPECTF.train file and test with the SPECTF.test file; notice that the labels are in the first column. The SPECT data can be found at: http://archive.ics.uci.edu/ml/machine-learning-databases/spect/.

Please create a new .py file for this part and import the necessary functions from neural.py, relu.py, or sigmoid.py (a hedged sketch of such a driver script appears after the file listings below). Also implement the sigmoid activation function and its gradient in sigmoid.py, the rectified linear unit (ReLU) activation function and its gradient in relu.py, and the forward and backward passes for the neural network in neural.py. Test your model first with one sigmoid hidden layer, and then replace the sigmoid layer with a ReLU layer. Check your code against the basic sanity check found in neural.py by running python neural.py.

neural.py

import numpy as np
import random

from q1_softmax import softmax
from q2_sigmoid import sigmoid, sigmoid_grad
from q2_relu import relu, relu_grad
from q2_gradcheck import gradcheck_naive


def CE(y, y_hat):
    return -np.sum(y * np.log(y_hat)) / len(y)


def forward_backward_prop(data, labels, params, dimensions, activation='sigmoid'):
    """
    Forward and backward propagation for a two-layer sigmoidal network

    Compute the forward propagation and the cross entropy cost,
    and the backward propagation for the gradients of all parameters.

    Arguments:
    data -- M x Dx matrix, where each row is a training example.
    labels -- M x Dy matrix, where each row is a one-hot vector.
    params -- Model parameters, these are unpacked for you.
    dimensions -- A tuple of input dimension, number of hidden units
                  and output dimension
    """
    # Unpack network parameters (do not modify)
    ofs = 0
    Dx, H, Dy = (dimensions[0], dimensions[1], dimensions[2])

    W1 = np.reshape(params[ofs:ofs + Dx * H], (Dx, H))
    ofs += Dx * H
    b1 = np.reshape(params[ofs:ofs + H], (1, H))
    ofs += H
    W2 = np.reshape(params[ofs:ofs + H * Dy], (H, Dy))
    ofs += H * Dy
    b2 = np.reshape(params[ofs:ofs + Dy], (1, Dy))

    # YOUR CODE HERE: forward propagation
    raise NotImplementedError
    # END YOUR CODE

    # YOUR CODE HERE: backward propagation
    raise NotImplementedError
    # END YOUR CODE

    assert W1.shape == gradW1.shape
    assert W2.shape == gradW2.shape
    assert b1.shape == gradb1.shape
    assert b2.shape == gradb2.shape

    # Stack gradients (do not modify)
    grad = np.concatenate((gradW1.flatten(), gradb1.flatten(),
                           gradW2.flatten(), gradb2.flatten()))

    return cost, grad
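# Editor's note (not part of the original starter code): a minimal sketch of
# what the two "YOUR CODE HERE" blocks above could compute, assuming a hidden
# layer with the chosen activation and a softmax output layer scored with the
# cross-entropy cost CE defined above. The helper name and the intermediate
# variable names (z1, h, y_hat, delta1, delta2) are hypothetical.
def _forward_backward_sketch(data, labels, W1, b1, W2, b2, activation='sigmoid'):
    M = data.shape[0]

    # Forward pass: hidden layer, then softmax output, then cross-entropy cost.
    z1 = data.dot(W1) + b1
    h = sigmoid(z1) if activation == 'sigmoid' else relu(z1)
    y_hat = softmax(h.dot(W2) + b2)
    cost = CE(labels, y_hat)

    # Backward pass: for softmax + cross-entropy averaged over M examples,
    # the gradient with respect to the output logits is (y_hat - labels) / M.
    delta2 = (y_hat - labels) / M
    gradW2 = h.T.dot(delta2)
    gradb2 = np.sum(delta2, axis=0, keepdims=True)

    # Propagate through the hidden layer; both sigmoid_grad and relu_grad
    # expect the activation value h itself, not the pre-activation z1.
    dh = delta2.dot(W2.T)
    delta1 = dh * (sigmoid_grad(h) if activation == 'sigmoid' else relu_grad(h))
    gradW1 = data.T.dot(delta1)
    gradb1 = np.sum(delta1, axis=0, keepdims=True)

    # Stack gradients in the same order used by forward_backward_prop.
    grad = np.concatenate((gradW1.flatten(), gradb1.flatten(),
                           gradW2.flatten(), gradb2.flatten()))
    return cost, grad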
def sanity_check():
    """
    Set up fake data and parameters for the neural network, and test using
    gradcheck.
    """
    print("Running sanity check...")

    N = 20
    dimensions = [10, 5, 10]
    data = np.random.randn(N, dimensions[0])   # each row will be a datum
    labels = np.zeros((N, dimensions[2]))
    for i in range(N):
        labels[i, random.randint(0, dimensions[2] - 1)] = 1

    params = np.random.randn((dimensions[0] + 1) * dimensions[1] + (
        dimensions[1] + 1) * dimensions[2], )

    gradcheck_naive(lambda params: forward_backward_prop(
        data, labels, params, dimensions), params)


def your_sanity_checks():
    """
    Use this space to add any additional sanity checks by running:
        python q2_neural.py
    This function will not be called by the autograder, nor will
    your additional tests be graded.
    """
    print("Running your sanity checks...")
    # YOUR CODE HERE
    # raise NotImplementedError
    # END YOUR CODE


if __name__ == "__main__":
    sanity_check()
    your_sanity_checks()


relu.py

import numpy as np


def relu(x):
    """
    Compute the relu function for the input here.

    Arguments:
    x -- A scalar or numpy array.

    Return:
    s -- relu(x)
    """
    ####################
    # your answer here
    ####################
    raise NotImplementedError
    # END YOUR CODE

    return s


def relu_grad(s):
    """
    Compute the gradient for the relu function here. Note that
    for this implementation, the input s should be the relu
    function value of your original input x.

    Arguments:
    s -- A scalar or numpy array.

    Return:
    ds -- Your computed gradient.
    """
    ####################
    # your answer here
    ####################
    raise NotImplementedError
    # END YOUR CODE

    return ds


def test_relu_basic():
    """
    Some simple tests to get you started.
    Warning: these are not exhaustive.
    """
    print("Running basic tests...")
    x = np.array([[1, 2], [-1, -2]])
    f = relu(x)
    g = relu_grad(f)
    print(f)
    f_ans = np.array([
        [1, 2],
        [0, 0]])
    assert np.allclose(f, f_ans, rtol=1e-05, atol=1e-06)
    print(g)
    g_ans = np.array([
        [1, 1],
        [0, 0]])
    assert np.allclose(g, g_ans, rtol=1e-05, atol=1e-06)
    print("You should verify these results by hand!\n")


def test_relu():
    """
    Use this space to test your relu implementation by running:
        python q2_relu.py
    This function will not be called by the autograder, nor will
    your tests be graded.
    """
    ####################
    # your answer here
    ####################
    print("Running your tests...")
    # END YOUR CODE


if __name__ == "__main__":
    test_relu_basic()
    test_relu()
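The bodies of relu and relu_grad above are left as exercises; a minimal sketch of one possible implementation is shown below (an editorial addition, not part of the starter files). It assumes, as the docstring states, that relu_grad receives s = relu(x) rather than the original input x.

import numpy as np

def relu(x):
    # Element-wise max(0, x); works for scalars and numpy arrays.
    s = np.maximum(0, x)
    return s

def relu_grad(s):
    # With s = relu(x), the derivative is 1 where s > 0 and 0 elsewhere.
    ds = (s > 0).astype(float)
    return ds

This sketch reproduces the expected values in test_relu_basic: relu([[1, 2], [-1, -2]]) gives [[1, 2], [0, 0]] and its gradient gives [[1, 1], [0, 0]].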
sigmoid.py

import numpy as np


def sigmoid(x):
    """
    Compute the sigmoid function for the input here.

    Arguments:
    x -- A scalar or numpy array.

    Return:
    s -- sigmoid(x)
    """
    ####################
    # your answer here
    ####################
    raise NotImplementedError
    # END YOUR CODE

    return s


def sigmoid_grad(s):
    """
    Compute the gradient for the sigmoid function here. Note that
    for this implementation, the input s should be the sigmoid
    function value of your original input x.

    Arguments:
    s -- A scalar or numpy array.

    Return:
    ds -- Your computed gradient.
    """
    ####################
    # your answer here
    ####################
    raise NotImplementedError
    # END YOUR CODE

    return ds


def test_sigmoid_basic():
    """
    Some simple tests to get you started.
    Warning: these are not exhaustive.
    """
    print("Running basic tests...")
    x = np.array([[1, 2], [-1, -2]])
    f = sigmoid(x)
    g = sigmoid_grad(f)
    print(f)
    f_ans = np.array([
        [0.73105858, 0.88079708],
        [0.26894142, 0.11920292]])
    assert np.allclose(f, f_ans, rtol=1e-05, atol=1e-06)
    print(g)
    g_ans = np.array([
        [0.19661193, 0.10499359],
        [0.19661193, 0.10499359]])
    assert np.allclose(g, g_ans, rtol=1e-05, atol=1e-06)
    print("You should verify these results by hand!\n")


def test_sigmoid():
    """
    Use this space to test your sigmoid implementation by running:
        python q2_sigmoid.py
    This function will not be called by the autograder, nor will
    your tests be graded.
    """
    print("Running your tests...")
    # raise NotImplementedError
    # END YOUR CODE


if __name__ == "__main__":
    test_sigmoid_basic()
    test_sigmoid()
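Likewise, a minimal sketch of one way sigmoid and sigmoid_grad could be implemented (again an editorial sketch, not the official solution); it matches the values checked in test_sigmoid_basic.

import numpy as np

def sigmoid(x):
    # Element-wise logistic function 1 / (1 + exp(-x)).
    s = 1.0 / (1.0 + np.exp(-x))
    return s

def sigmoid_grad(s):
    # With s = sigmoid(x), the derivative is sigmoid(x) * (1 - sigmoid(x)) = s * (1 - s).
    ds = s * (1.0 - s)
    return ds

For example, sigmoid(1) is about 0.73105858 and its gradient 0.73105858 * (1 - 0.73105858) is about 0.19661193, as expected by the basic test.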
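Finally, a hedged sketch of the separate driver script the assignment asks for: it loads SPECTF.train and SPECTF.test (comma-separated rows with the label in the first column), one-hot encodes the two classes, trains the two-layer network with plain gradient descent, and reports test accuracy. The file name spect_classify.py, the hidden-layer size, learning rate, epoch count, and the import paths are illustrative assumptions, not part of the assignment starter code; it relies on forward_backward_prop and sigmoid being implemented as above.

# spect_classify.py -- hypothetical driver script (an illustrative sketch only)
import numpy as np

from neural import forward_backward_prop   # assumed import path
from sigmoid import sigmoid                # assumed import path


def load_spectf(path):
    # Each SPECTF row is comma-separated: label first, integer features after.
    raw = np.loadtxt(path, delimiter=',')
    labels = raw[:, 0].astype(int)
    features = raw[:, 1:]
    one_hot = np.zeros((len(labels), 2))            # two classes: normal / abnormal
    one_hot[np.arange(len(labels)), labels] = 1
    return features, one_hot, labels


def predict(data, params, dimensions):
    # Re-run the forward pass with the trained parameters (sigmoid hidden layer assumed).
    Dx, H, Dy = dimensions
    ofs = 0
    W1 = params[ofs:ofs + Dx * H].reshape(Dx, H); ofs += Dx * H
    b1 = params[ofs:ofs + H].reshape(1, H);       ofs += H
    W2 = params[ofs:ofs + H * Dy].reshape(H, Dy); ofs += H * Dy
    b2 = params[ofs:ofs + Dy].reshape(1, Dy)
    scores = sigmoid(data.dot(W1) + b1).dot(W2) + b2
    return np.argmax(scores, axis=1)                # softmax is monotonic, so argmax of scores suffices


if __name__ == '__main__':
    train_x, train_y, _ = load_spectf('SPECTF.train')
    test_x, test_y, test_labels = load_spectf('SPECTF.test')

    dimensions = [train_x.shape[1], 10, 2]          # hidden size 10 is an assumption
    n_params = (dimensions[0] + 1) * dimensions[1] + (dimensions[1] + 1) * dimensions[2]
    params = np.random.randn(n_params) * 0.01

    # Plain batch gradient descent; the learning rate and epoch count are illustrative.
    lr = 0.01
    for epoch in range(500):
        cost, grad = forward_backward_prop(train_x, train_y, params, dimensions,
                                           activation='sigmoid')   # then retry with 'relu'
        params = params - lr * grad

    accuracy = np.mean(predict(test_x, params, dimensions) == test_labels)
    print("Test accuracy: {:.3f}".format(accuracy))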

  
