@@ -223,8 +223,8 @@ def __init__(
             use_softmax: whether to use softmax to transform the original logits into probabilities.
                 If True, softmax is used. If False, sigmoid is used. Defaults to False.
             delta : weight of the background. Defaults to 0.7.
-            gamma : value of the exponent gamma in the definition of the Focal loss. Defaults to 0.75.
-            weight : weight for each loss function. Defaults to 0.5.
+            gamma : value of the exponent gamma in the definition of the Focal loss. Defaults to 2.
+            weight: weight for combining the focal and focal-Tversky terms. Defaults to 0.5.

         Example:
             >>> import torch
@@ -241,10 +241,16 @@ def __init__(
         self.delta = delta
         self.use_softmax = use_softmax
         self.asy_focal_loss = AsymmetricFocalLoss(
-            to_onehot_y=to_onehot_y, gamma=self.gamma, delta=self.delta, use_softmax=use_softmax
+            to_onehot_y=to_onehot_y,
+            use_softmax=use_softmax,
+            delta=self.delta,
+            gamma=self.gamma,
         )
         self.asy_focal_tversky_loss = AsymmetricFocalTverskyLoss(
-            to_onehot_y=to_onehot_y, gamma=self.gamma, delta=self.delta, use_softmax=use_softmax
+            to_onehot_y=to_onehot_y,
+            use_softmax=use_softmax,
+            delta=self.delta,
+            gamma=self.gamma,
         )

     def forward(self, y_pred: torch.Tensor, y_true: torch.Tensor) -> torch.Tensor:
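For context, a minimal doctest-style sketch (not part of the diff) of the combination the updated `weight` docstring describes. The `forward` body is outside this hunk, so the convex blend `weight * focal + (1 - weight) * focal_tversky` is an assumption consistent with the docstring; only constructor arguments known to exist on the public `AsymmetricFocalLoss` and `AsymmetricFocalTverskyLoss` are used, so the `use_softmax` flag introduced here is omitted and softmax is applied manually:

>>> import torch
>>> from monai.losses import AsymmetricFocalLoss, AsymmetricFocalTverskyLoss
>>> weight, gamma, delta = 0.5, 2.0, 0.7  # docstring defaults from this hunk
>>> focal = AsymmetricFocalLoss(to_onehot_y=True, gamma=gamma, delta=delta)
>>> tversky = AsymmetricFocalTverskyLoss(to_onehot_y=True, gamma=gamma, delta=delta)
>>> pred = torch.softmax(torch.randn(2, 2, 32, 32), dim=1)  # class probabilities, 2 classes
>>> grnd = torch.randint(0, 2, (2, 1, 32, 32))  # integer labels, single channel
>>> loss = weight * focal(pred, grnd) + (1 - weight) * tversky(pred, grnd)  # assumed blend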