网络成本函数代码python实现

q8l4jmvw  于 2021-08-20  发布在  Java
关注(0)|答案(0)|浏览(177)

我在 Python 中完成 Andrew Ng 的机器学习课程,在第 5 周的练习 4 中参考了下面这份代码。我不明白的是,为什么最终输出中需要使用 np.trace()。是矩阵的组织方式有问题吗?

import numpy as np
from scipy.optimize import minimize
import scipy.io
import matplotlib.pyplot as plt

# Load the training set from the course's Octave-format .mat file.
data_dict = scipy.io.loadmat('ex4_orig_octave/ex4data1.mat')

X = data_dict['X']
# Flatten the (M, 1) label column to 1-D; labels look 1-based
# (see the `y[i] - 1` shift below) -- TODO confirm against the data file.
y = data_dict['y'].ravel()

M = X.shape[0]  # number of training examples
N = X.shape[1]  # number of input features per example
L = 26 # = number of nodes in the hidden layer (including bias node)
K = len(np.unique(y))  # number of distinct class labels

# Prepend a column of ones: the bias feature for the input layer.
X = np.hstack((np.ones((M, 1)), X))

# One-hot label matrix: row i gets a 1 in column (y[i] - 1).
Y = np.zeros((M, K), dtype='uint8') 

for i, row in enumerate(Y):
    Y[i, y[i] - 1] = 1

# Load the pre-trained weight matrices shipped with the exercise.
weights_dict = scipy.io.loadmat('ex4_orig_octave/ex4weights.mat')

theta_1 = weights_dict['Theta1']
theta_2 = weights_dict['Theta2']

# Unroll both weight matrices into one flat vector, the shape
# scipy.optimize.minimize expects for its parameter argument.
nn_params_saved = np.concatenate((theta_1.flatten(), theta_2.flatten()))

def nn_cost_function(nn_params, X, Y, M, N, L, K):
    """Unregularized cost of a 3-layer neural network ('Part 1' of ex4).

    Parameters
    ----------
    nn_params : 1-D array, flattened [Theta1, Theta2] weights.
    X : (M, N+1) design matrix, bias column already prepended.
    Y : (M, K) one-hot label matrix.
    M, N : number of examples / input features (excluding bias).
    L : hidden-layer size including the bias node.
    K : number of output classes.

    Returns
    -------
    float
        Cross-entropy cost averaged over the M examples.
    """
    # The original script called `sigmoid` without ever defining it
    # (NameError); define it locally so the function is self-contained.
    def sigmoid(z):
        return 1.0 / (1.0 + np.exp(-z))

    # Unroll the parameter vector into the two weight matrices.
    theta_1 = nn_params[:(L - 1) * (N + 1)].reshape(L - 1, N + 1)
    theta_2 = nn_params[(L - 1) * (N + 1):].reshape(K, L)

    # Hidden-layer activations, one column per example: (L-1, M).
    a_2 = sigmoid(theta_1.dot(X.T))

    # Add the hidden layer's bias row -> (L, M).
    a_2_p = np.vstack((np.ones(M), a_2))

    # Output-layer activations: (K, M).
    a_3 = sigmoid(theta_2.dot(a_2_p))

    # Cross-entropy cost. The original computed
    #   np.trace(-Y.dot(np.log(a_3)) - (1 - Y).dot(np.log(1 - a_3)))
    # which builds a full (M, M) matrix only to read its diagonal --
    # O(M^2 K) time and O(M^2) memory. The elementwise form below sums
    # exactly the same diagonal terms directly in O(M K).
    cost = -np.sum(Y.T * np.log(a_3) + (1 - Y.T) * np.log(1 - a_3)) / M

    return cost

# Evaluate the cost at the pre-trained weights as a sanity check.
cost_saved = nn_cost_function(nn_params_saved, X, Y, M, N, L, K)

# print() calls instead of the original Python 2 print statements,
# which are a SyntaxError on any Python 3 interpreter.
print('Cost at parameters (loaded from ex4weights): %.6f' % cost_saved)
print('(this value should be about 0.287629)')

暂无答案!

目前还没有任何答案,快来回答吧!

相关问题