Skip to content

Commit

Permalink
Ability to use regularizers. Closes #10
Browse files Browse the repository at this point in the history
  • Loading branch information
sjawhar committed Mar 18, 2020
1 parent bfe893f commit 625978b
Show file tree
Hide file tree
Showing 2 changed files with 29 additions and 3 deletions.
23 changes: 22 additions & 1 deletion app/src/models.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import json
import logging
from tensorflow.keras import regularizers
from tensorflow.keras.callbacks import Callback, ModelCheckpoint, TensorBoard
from tensorflow.keras.layers import (
Activation,
Expand All @@ -19,6 +20,7 @@
PARAM_ACTIVATION = "activation"
PARAM_INPUT_SHAPE = "input_shape"
PARAM_RETURN_SEQUENCES = "return_sequences"
PARAM_POOL_SIZE = "pool_size"
PARAM_UNITS = "units"
PARAM_VERBOSE = "verbose"

Expand Down Expand Up @@ -47,6 +49,21 @@ def add_ic_layer(model, name, dropout=0.2, batchnorm=True):


def add_layer(model, layer, name, ic_params={}, **kwargs):
    # Add a single Keras layer to *model*, first normalizing any
    # "*_regularizer" kwarg (e.g. kernel_regularizer) into a Regularizer
    # object, so callers may pass a string identifier or a dict of L1/L2
    # coefficients instead of a constructed instance.
    # NOTE(review): ic_params={} is a mutable default argument; consider
    # ic_params=None (the non-dict guard below already handles None).
    for arg in kwargs:
        if not arg.endswith("_regularizer"):
            continue
        config = kwargs[arg]
        # Already a Regularizer instance, or explicitly disabled: leave as-is.
        if config is None or isinstance(config, regularizers.Regularizer):
            continue
        # dict -> L1L2(**config), e.g. {"l1": 0.1, "l2": 0.2}; anything else
        # (e.g. the string "l1" or "l2") is resolved via regularizers.get().
        reg = (
            regularizers.L1L2(**config)
            if type(config) is dict
            else regularizers.get(config)
        )
        logger.debug(
            f"Adding regularizer {type(reg).__name__} to {name}: {reg.get_config()}"
        )
        # Reassigning an existing key while iterating is safe (no dict resize).
        kwargs[arg] = reg
    logger.debug(f'Adding layer "{name}" - {layer.__name__}: {kwargs}')
    model.add(layer(name=name, **kwargs))
if type(ic_params) is not dict:
Expand All @@ -56,7 +73,11 @@ def add_layer(model, layer, name, ic_params={}, **kwargs):

def add_conv1d_layer(model, name, ic_params=None, pool=None, **kwargs):
    """Append a Conv1D layer, optional max-pooling, and an IC layer to *model*.

    Parameters
    ----------
    model : the Keras Sequential model being built.
    name : base name for the created layers (pooling layer gets "{name}_pool").
    ic_params : dict or None
        Keyword arguments forwarded to the trailing ``add_ic_layer`` call;
        ``None`` (the default) means use ``add_ic_layer``'s defaults.
    pool : None/falsy, True, int, or dict
        Falsy -> no pooling layer; ``True`` -> MaxPooling1D with default
        pool size; int -> that pool size; dict -> passed through as
        MaxPooling1D keyword arguments.
    **kwargs : forwarded to the Conv1D layer (regularizer specs included;
        ``add_layer`` normalizes them).
    """
    # Avoid a shared mutable default argument for ic_params.
    if ic_params is None:
        ic_params = {}
    # ic_params=None here on purpose: the IC layer is added exactly once,
    # after the optional pooling layer, via add_ic_layer below.
    add_layer(model, Conv1D, name, ic_params=None, **kwargs)
    if pool:
        # Normalize the shorthand forms into MaxPooling1D kwargs.
        if pool is True:
            pool = {}
        elif isinstance(pool, int):  # bools are handled above, so plain ints only
            pool = {PARAM_POOL_SIZE: pool}
        add_layer(model, MaxPooling1D, f"{name}_pool", ic_params=None, **pool)
    add_ic_layer(model, name, **ic_params)

Expand Down
9 changes: 7 additions & 2 deletions docs/Usage/Usage.md
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,6 @@ Channel name for Right Aux. Must be provided if Right Aux has data, otherwise ch
### Train
Builds a model (currently only LSTM is supported) and saves the built model and diagram image in `MODEL_DIR`. If `--epochs` is not 0, also trains the model on the data in `DATA_FILE` and saves the trained model and training history to `MODEL_DIR`. Even if `--epochs` is not 0, `DATA_FILE` is required to determine the input size to the LSTM.

Note that if `--shuffle-samples` is not true, only samples belonging to contiguous sequences of length `--sequence-size` are used.

```bash
train [arguments] DATA_FILE MODEL_DIR
```
Expand Down Expand Up @@ -153,3 +151,10 @@ Save TensorBoard logs every epoch. Default is false

**`-g, --gradient-metrics`**
Print metrics in Gradient chart format every epoch. Default is false

#### Usage Notes
* If `--shuffle-samples` is not true, only samples belonging to contiguous sequences of length `--sequence-size` are used.
* Layer specification parameters (e.g. `--lstm`, `--conv1d`) should be a list of objects in JSON format, where each object in the list contains the specification of a single layer. To create only one layer, you can also use a single JSON object instead of an array of length 1. Please refer to the YAML files in the `.ps_project` folder for examples.
* Each layer specification object can include one or more regularization parameters (e.g. `kernel_regularizer`). The value of these parameters should be either a string or a dict. For example:
* `"l1"` or `"l2"` will create the corresponding regularizer with default parameters.
* `{"l1": 0.1, "l2": 0.2}` will create an l1_l2 regularizer with parameters `l1=0.1, l2=0.2`. You don't need to include both l1 and l2 values.

0 comments on commit 625978b

Please sign in to comment.