Skip to content

Commit

Permalink
Regularization: adding weight decay to Loss (#788)
Browse files Browse the repository at this point in the history
* Regularization: adding weight decay to Loss

* Corrected Javadoc comments

* Corrected Javadoc comments: checked with ./gradlew :api:checkstyleMain

* Corrected Javadoc comments again: checked with ./gradlew :api:checkstyleMain

Co-authored-by: Hugo Miguel Ferreira <[email protected]>
  • Loading branch information
hmf and Hugo Miguel Ferreira authored Mar 29, 2021
1 parent b835682 commit c317884
Show file tree
Hide file tree
Showing 5 changed files with 444 additions and 0 deletions.
101 changes: 101 additions & 0 deletions api/src/main/java/ai/djl/training/loss/ElasticNetWeightDecay.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/

package ai.djl.training.loss;

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;

/**
 * {@code ElasticNetWeightDecay} calculates the L1+L2 penalty of a set of parameters. Used for
 * regularization.
 *
 * <p>L loss is defined as \(L = \lambda_1 \sum_i \vert W_i\vert + \lambda_2 \sum_i {W_i}^2\).
 */
public class ElasticNetWeightDecay extends Loss {

    private final float lambda1;
    private final float lambda2;
    private final NDList parameters;

    /**
     * Calculates Elastic Net weight decay for regularization.
     *
     * @param parameters holds the model weights that will be penalized
     */
    public ElasticNetWeightDecay(NDList parameters) {
        this("ElasticNetWeightDecay", parameters);
    }

    /**
     * Calculates Elastic Net weight decay for regularization.
     *
     * @param name the name of the penalty
     * @param parameters holds the model weights that will be penalized
     */
    public ElasticNetWeightDecay(String name, NDList parameters) {
        this(name, parameters, 1);
    }

    /**
     * Calculates Elastic Net weight decay for regularization.
     *
     * @param name the name of the penalty
     * @param parameters holds the model weights that will be penalized
     * @param lambda the weight to apply to both the L1 and the L2 penalty value, default 1
     */
    public ElasticNetWeightDecay(String name, NDList parameters, float lambda) {
        // Chain to the 4-arg constructor instead of duplicating its field assignments.
        this(name, parameters, lambda, lambda);
    }

    /**
     * Calculates Elastic Net weight decay for regularization.
     *
     * @param name the name of the penalty
     * @param parameters holds the model weights that will be penalized
     * @param lambda1 the weight to apply to the L1 penalty value, default 1
     * @param lambda2 the weight to apply to the L2 penalty value, default 1
     */
    public ElasticNetWeightDecay(String name, NDList parameters, float lambda1, float lambda2) {
        super(name);
        this.lambda1 = lambda1;
        this.lambda2 = lambda2;
        this.parameters = parameters;
    }

    /** Computes the L1 term \(\sum_i \vert w_i\vert\) for one parameter array. */
    private NDArray l1(NDArray w) {
        return w.abs().sum();
    }

    /** Computes the L2 term \(\sum_i {w_i}^2\) for one parameter array. */
    private NDArray l2(NDArray w) {
        return w.square().sum();
    }

    /**
     * {@inheritDoc}
     *
     * <p>The {@code label} and {@code prediction} arguments are ignored; the penalty depends only
     * on the parameters captured at construction time.
     */
    @Override
    public NDArray evaluate(NDList label, NDList prediction) {
        NDManager manager = parameters.getManager();
        NDArray sum1 = manager.create(0.0f);
        NDArray sum2 = manager.create(0.0f);
        for (NDArray wi : parameters) {
            // Accumulate both penalty terms in place to avoid extra allocations.
            sum1.addi(l1(wi));
            sum2.addi(l2(wi));
        }
        return sum1.muli(lambda1).addi(sum2.muli(lambda2));
    }
}
77 changes: 77 additions & 0 deletions api/src/main/java/ai/djl/training/loss/L1WeightDecay.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/

package ai.djl.training.loss;

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;

/**
 * {@code L1WeightDecay} computes the L1 penalty over a collection of parameters, used for
 * regularization.
 *
 * <p>L1 loss is defined as \(L1 = \lambda \sum_i \vert W_i\vert\).
 */
public class L1WeightDecay extends Loss {

    private final float lambda;
    private final NDList parameters;

    /**
     * Creates an L1 weight-decay penalty with the default name and weight.
     *
     * @param parameters holds the model weights that will be penalized
     */
    public L1WeightDecay(NDList parameters) {
        this("L1WeightDecay", parameters);
    }

    /**
     * Creates an L1 weight-decay penalty with the default weight.
     *
     * @param name the name of the penalty
     * @param parameters holds the model weights that will be penalized
     */
    public L1WeightDecay(String name, NDList parameters) {
        this(name, parameters, 1);
    }

    /**
     * Creates an L1 weight-decay penalty.
     *
     * @param name the name of the penalty
     * @param parameters holds the model weights that will be penalized
     * @param lambda the weight to apply to the penalty value, default 1
     */
    public L1WeightDecay(String name, NDList parameters, float lambda) {
        super(name);
        this.lambda = lambda;
        this.parameters = parameters;
    }

    /**
     * {@inheritDoc}
     *
     * <p>{@code label} and {@code prediction} are ignored; the penalty depends only on the
     * parameters captured at construction time.
     */
    @Override
    public NDArray evaluate(NDList label, NDList prediction) {
        NDManager manager = parameters.getManager();
        NDArray penalty = manager.create(0.0f);
        for (NDArray weight : parameters) {
            // Sum of absolute values of every element of this parameter array.
            penalty.addi(weight.abs().sum());
        }
        return penalty.muli(lambda);
    }
}
77 changes: 77 additions & 0 deletions api/src/main/java/ai/djl/training/loss/L2WeightDecay.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,77 @@
/*
* Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
* with the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0/
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/

package ai.djl.training.loss;

import ai.djl.ndarray.NDArray;
import ai.djl.ndarray.NDList;
import ai.djl.ndarray.NDManager;

/**
 * {@code L2WeightDecay} computes the L2 penalty over a collection of parameters, used for
 * regularization.
 *
 * <p>L2 loss is defined by \(L2 = \lambda \sum_i {W_i}^2\).
 */
public class L2WeightDecay extends Loss {

    private final float lambda;
    private final NDList parameters;

    /**
     * Creates an L2 weight-decay penalty with the default name and weight.
     *
     * @param parameters holds the model weights that will be penalized
     */
    public L2WeightDecay(NDList parameters) {
        this("L2WeightDecay", parameters);
    }

    /**
     * Creates an L2 weight-decay penalty with the default weight.
     *
     * @param name the name of the penalty
     * @param parameters holds the model weights that will be penalized
     */
    public L2WeightDecay(String name, NDList parameters) {
        this(name, parameters, 1);
    }

    /**
     * Creates an L2 weight-decay penalty.
     *
     * @param name the name of the penalty
     * @param parameters holds the model weights that will be penalized
     * @param lambda the weight to apply to the penalty value, default 1
     */
    public L2WeightDecay(String name, NDList parameters, float lambda) {
        super(name);
        this.lambda = lambda;
        this.parameters = parameters;
    }

    /**
     * {@inheritDoc}
     *
     * <p>{@code label} and {@code prediction} are ignored; the penalty depends only on the
     * parameters captured at construction time.
     */
    @Override
    public NDArray evaluate(NDList label, NDList prediction) {
        NDManager manager = parameters.getManager();
        NDArray penalty = manager.create(0.0f);
        for (NDArray weight : parameters) {
            // Sum of squared values of every element of this parameter array.
            penalty.addi(weight.square().sum());
        }
        return penalty.muli(lambda);
    }
}
114 changes: 114 additions & 0 deletions api/src/main/java/ai/djl/training/loss/Loss.java
Original file line number Diff line number Diff line change
Expand Up @@ -239,6 +239,120 @@ public static HingeLoss hingeLoss(String name, int margin, float weight) {
return new HingeLoss(name, margin, weight);
}

/**
 * Returns a new instance of {@link L1WeightDecay} with the default weight and name.
 *
 * <p>NOTE(review): the method name says "WeightedDecay" while the class is named "WeightDecay";
 * kept unchanged for API compatibility.
 *
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link L1WeightDecay}
 */
public static L1WeightDecay l1WeightedDecay(NDList parameters) {
    L1WeightDecay decay = new L1WeightDecay(parameters);
    return decay;
}

/**
 * Returns a new instance of {@link L1WeightDecay} with the default weight.
 *
 * @param name the name of the weight decay
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link L1WeightDecay}
 */
public static L1WeightDecay l1WeightedDecay(String name, NDList parameters) {
    L1WeightDecay decay = new L1WeightDecay(name, parameters);
    return decay;
}

/**
 * Returns a new instance of {@link L1WeightDecay}.
 *
 * <p>Note the argument order here is (name, weight, parameters) while the constructor takes
 * (name, parameters, weight).
 *
 * @param name the name of the weight decay
 * @param weight the weight to apply on weight decay value, default 1
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link L1WeightDecay}
 */
public static L1WeightDecay l1WeightedDecay(String name, float weight, NDList parameters) {
    L1WeightDecay decay = new L1WeightDecay(name, parameters, weight);
    return decay;
}

/**
 * Returns a new instance of {@link L2WeightDecay} with the default weight and name.
 *
 * <p>NOTE(review): the method name says "WeightedDecay" while the class is named "WeightDecay";
 * kept unchanged for API compatibility.
 *
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link L2WeightDecay}
 */
public static L2WeightDecay l2WeightedDecay(NDList parameters) {
    L2WeightDecay decay = new L2WeightDecay(parameters);
    return decay;
}

/**
 * Returns a new instance of {@link L2WeightDecay} with the default weight.
 *
 * @param name the name of the weight decay
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link L2WeightDecay}
 */
public static L2WeightDecay l2WeightedDecay(String name, NDList parameters) {
    L2WeightDecay decay = new L2WeightDecay(name, parameters);
    return decay;
}

/**
 * Returns a new instance of {@link L2WeightDecay}.
 *
 * <p>Note the argument order here is (name, weight, parameters) while the constructor takes
 * (name, parameters, weight).
 *
 * @param name the name of the weight decay
 * @param weight the weight to apply on weight decay value, default 1
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link L2WeightDecay}
 */
public static L2WeightDecay l2WeightedDecay(String name, float weight, NDList parameters) {
    L2WeightDecay decay = new L2WeightDecay(name, parameters, weight);
    return decay;
}

/**
 * Returns a new instance of {@link ElasticNetWeightDecay} with the default weight and name.
 *
 * <p>NOTE(review): the method name says "WeightedDecay" while the class is named "WeightDecay";
 * kept unchanged for API compatibility.
 *
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link ElasticNetWeightDecay}
 */
public static ElasticNetWeightDecay elasticNetWeightedDecay(NDList parameters) {
    ElasticNetWeightDecay decay = new ElasticNetWeightDecay(parameters);
    return decay;
}

/**
 * Returns a new instance of {@link ElasticNetWeightDecay} with the default weight.
 *
 * @param name the name of the weight decay
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link ElasticNetWeightDecay}
 */
public static ElasticNetWeightDecay elasticNetWeightedDecay(String name, NDList parameters) {
    ElasticNetWeightDecay decay = new ElasticNetWeightDecay(name, parameters);
    return decay;
}

/**
 * Returns a new instance of {@link ElasticNetWeightDecay} applying the same weight to both the
 * L1 and the L2 term.
 *
 * <p>Note the argument order here is (name, weight, parameters) while the constructor takes
 * (name, parameters, weight).
 *
 * @param name the name of the weight decay
 * @param weight the weight to apply on weight decay values, default 1
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link ElasticNetWeightDecay}
 */
public static ElasticNetWeightDecay elasticNetWeightedDecay(
        String name, float weight, NDList parameters) {
    ElasticNetWeightDecay decay = new ElasticNetWeightDecay(name, parameters, weight);
    return decay;
}

/**
 * Returns a new instance of {@link ElasticNetWeightDecay} with separate L1 and L2 weights.
 *
 * <p>Note the argument order here is (name, weights, parameters) while the constructor takes
 * (name, parameters, weights).
 *
 * @param name the name of the weight decay
 * @param weight1 the weight to apply on weight decay L1 value, default 1
 * @param weight2 the weight to apply on weight decay L2 value, default 1
 * @param parameters holds the model weights that will be penalized
 * @return a new instance of {@link ElasticNetWeightDecay}
 */
public static ElasticNetWeightDecay elasticNetWeightedDecay(
        String name, float weight1, float weight2, NDList parameters) {
    ElasticNetWeightDecay decay = new ElasticNetWeightDecay(name, parameters, weight1, weight2);
    return decay;
}

/** {@inheritDoc} */
@Override
public void addAccumulator(String key) {
Expand Down
Loading

0 comments on commit c317884

Please sign in to comment.