
Keras: Custom Objective Functions (Loss Functions)

 LibraryPKU 2021-03-17
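
The listing below is the built-in loss module from the Keras source (losses.py, Keras 2.x). It is the best template for writing a custom objective: every loss is a plain function of two tensors, y_true and y_pred, that returns (as the logcosh docstring puts it) a tensor with one scalar loss entry per sample.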
"""Built-in loss functions. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import six from . import backend as K from .utils.generic_utils import deserialize_keras_object from .utils.generic_utils import serialize_keras_object def mean_squared_error(y_true, y_pred): return K.mean(K.square(y_pred - y_true), axis=-1) def mean_absolute_error(y_true, y_pred): return K.mean(K.abs(y_pred - y_true), axis=-1) def mean_absolute_percentage_error(y_true, y_pred): diff = K.abs((y_true - y_pred) / K.clip(K.abs(y_true), K.epsilon(), None)) return 100. * K.mean(diff, axis=-1) def mean_squared_logarithmic_error(y_true, y_pred): first_log = K.log(K.clip(y_pred, K.epsilon(), None) + 1.) second_log = K.log(K.clip(y_true, K.epsilon(), None) + 1.) return K.mean(K.square(first_log - second_log), axis=-1) def squared_hinge(y_true, y_pred): return K.mean(K.square(K.maximum(1. - y_true * y_pred, 0.)), axis=-1) def hinge(y_true, y_pred): return K.mean(K.maximum(1. - y_true * y_pred, 0.), axis=-1) def categorical_hinge(y_true, y_pred): pos = K.sum(y_true * y_pred, axis=-1) neg = K.max((1. - y_true) * y_pred, axis=-1) return K.maximum(0., neg - pos + 1.) def logcosh(y_true, y_pred): """Logarithm of the hyperbolic cosine of the prediction error. `log(cosh(x))` is approximately equal to `(x ** 2) / 2` for small `x` and to `abs(x) - log(2)` for large `x`. This means that 'logcosh' works mostly like the mean squared error, but will not be so strongly affected by the occasional wildly incorrect prediction. # Arguments y_true: tensor of true targets. y_pred: tensor of predicted targets. # Returns Tensor with one scalar loss entry per sample. """ def _logcosh(x): return x + K.softplus(-2. * x) - K.log(2.) return K.mean(_logcosh(y_pred - y_true), axis=-1) def categorical_crossentropy(y_true, y_pred): return K.categorical_crossentropy(y_true, y_pred) def sparse_categorical_crossentropy(y_true, y_pred): return K.sparse_categorical_crossentropy(y_true, y_pred) def binary_crossentropy(y_true, y_pred): return K.mean(K.binary_crossentropy(y_true, y_pred), axis=-1) def kullback_leibler_divergence(y_true, y_pred): y_true = K.clip(y_true, K.epsilon(), 1) y_pred = K.clip(y_pred, K.epsilon(), 1) return K.sum(y_true * K.log(y_true / y_pred), axis=-1) def poisson(y_true, y_pred): return K.mean(y_pred - y_true * K.log(y_pred + K.epsilon()), axis=-1) def cosine_proximity(y_true, y_pred): y_true = K.l2_normalize(y_true, axis=-1) y_pred = K.l2_normalize(y_pred, axis=-1) return -K.sum(y_true * y_pred, axis=-1) # Aliases. mse = MSE = mean_squared_error mae = MAE = mean_absolute_error mape = MAPE = mean_absolute_percentage_error msle = MSLE = mean_squared_logarithmic_error kld = KLD = kullback_leibler_divergence cosine = cosine_proximity def serialize(loss): return serialize_keras_object(loss) def deserialize(name, custom_objects=None): return deserialize_keras_object(name, module_objects=globals(), custom_objects=custom_objects, printable_module_name='loss function') def get(identifier): """Get the `identifier` loss function. # Arguments identifier: None or str, name of the function. # Returns The loss function or None if `identifier` is None. # Raises ValueError if unknown identifier. 
""" if identifier is None: return None if isinstance(identifier, six.string_types): identifier = str(identifier) return deserialize(identifier) if isinstance(identifier, dict): return deserialize(identifier) elif callable(identifier): return identifier else: raise ValueError('Could not interpret ' 'loss function identifier:', identifier)

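One caveat follows directly from `deserialize` above: loss names are resolved against this module's `globals()` plus an optional `custom_objects` dict, so a model saved with a custom loss must be reloaded with that mapping. A minimal sketch, where the file name and the asymmetric_mse function from the previous example are placeholders:

    from keras.models import load_model

    # Map the serialized name back to the callable when loading.
    model = load_model('model.h5',
                       custom_objects={'asymmetric_mse': asymmetric_mse})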