def svm_loss(x, y):
    """
    Computes the loss and gradient using for multiclass SVM classification.
    Inputs:
    - x: Input data, of shape (N, C) where x[i, j] is the score for the jth
    class for the ith input.
    - y: Vector of labels, of shape (N,) where y[i] is the label for x[i] and
    0 <= y[i] < C
    Returns a tuple of:
    - loss: Scalar giving the loss
    - dx: Gradient of the loss with respect to x
    """
    N = x.shape[0]
    rows = np.arange(N)

    # Hinge margins: max(0, s_j - s_{y_i} + 1) for every class j.
    # NOTE: build a new array rather than doing `x -= ...` in place — an
    # in-place update would clobber the caller's scores and, because the
    # subtracted column is a view of x itself, can give wrong results.
    correct_scores = x[rows, y][:, np.newaxis]          # shape (N, 1)
    margins = np.maximum(0, x - correct_scores + 1)

    # The correct class contributes a spurious margin of exactly 1;
    # zero it out explicitly instead of subtracting N from the sum.
    margins[rows, y] = 0
    loss = np.sum(margins) / N

    # Gradient: each positive margin contributes +1 to its class column
    # and -1 to the correct-class column, averaged over the batch.
    dx = (margins > 0).astype(x.dtype)
    dx[rows, y] -= np.sum(dx, axis=1)
    dx /= N

    return loss, dx
# NOTE (from a Q&A thread about this code): the expressions
#     x = x - x[np.arange(N), y][:, np.newaxis] + 1
# and
#     x -= (x[np.arange(N), y][:, np.newaxis] - 1)
# can give different results. The first builds a brand-new array from the
# original x; the second modifies x in place. Because the subtracted column
# is itself computed from x, an in-place update also clobbers the caller's
# score matrix, which matters if the scores are reused (e.g. for the
# gradient). On a simple demo case the two may coincide, but the
# out-of-place form (with the [:, np.newaxis] index adding the new
# broadcast dimension) is the safe choice.