DenseNet Classifier: Detecting Regions of Interest in Synthetic Signals#

This example demonstrates how to use DeepPeak’s DenseNet classifier to identify regions of interest (ROIs) in synthetic 1D signals containing Gaussian peaks.

We will:

- Generate a dataset of noisy signals with random Gaussian peaks
- Build and train a DenseNet classifier to detect ROIs
- Visualize the training process and model predictions

Note

This example is fully reproducible and suitable for Sphinx-Gallery documentation.

Imports and reproducibility#

import numpy as np

from DeepPeak.machine_learning.classifier import DenseNet
from DeepPeak.signals import Kernel, SignalDatasetGenerator

np.random.seed(42)

Generate synthetic dataset#

NUM_PEAKS = 3
SEQUENCE_LENGTH = 200

generator = SignalDatasetGenerator(n_samples=100, sequence_length=SEQUENCE_LENGTH)

dataset = generator.generate(
    signal_type=Kernel.GAUSSIAN,
    n_peaks=(1, NUM_PEAKS),
    amplitude=(1, 20),
    position=(0.1, 0.9),
    width=(0.03, 0.05),
    noise_std=0.1,
    categorical_peak_count=False,
    compute_region_of_interest=True,
)

Visualize a few example signals and their regions of interest#

dataset.plot(number_of_samples=3)
(Figure: three example signals with their regions of interest highlighted)

Build and summarize the DenseNet classifier#

dense_net = DenseNet(
    sequence_length=SEQUENCE_LENGTH,
    filters=(32, 64, 128),
    dilation_rates=(1, 2, 4),
    kernel_size=3,
    optimizer="adam",
    loss="binary_crossentropy",
    metrics=["accuracy"],
)
dense_net.build()
dense_net.summary()
Model: "DenseNetDetector"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ input (InputLayer)              │ (None, 200, 1)         │             0 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ conv_0 (Conv1D)                 │ (None, 200, 32)        │           128 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ conv_1 (Conv1D)                 │ (None, 200, 64)        │         6,208 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ conv_2 (Conv1D)                 │ (None, 200, 128)       │        24,704 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ ROI (Conv1D)                    │ (None, 200, 1)         │           129 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 31,169 (121.75 KB)
 Trainable params: 31,169 (121.75 KB)
 Non-trainable params: 0 (0.00 B)

Train the classifier#

history = dense_net.fit(
    dataset.signals,
    dataset.region_of_interest,
    validation_split=0.2,
    epochs=20,
    batch_size=64,
)
Epoch 1/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 1s 1s/step - accuracy: 0.9453 - loss: 0.6770
2/2 ━━━━━━━━━━━━━━━━━━━━ 1s 199ms/step - accuracy: 0.9474 - loss: 0.6745 - val_accuracy: 0.9613 - val_loss: 0.6526
Epoch 2/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 37ms/step - accuracy: 0.9502 - loss: 0.6451
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9496 - loss: 0.6391 - val_accuracy: 0.9613 - val_loss: 0.6211
Epoch 3/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step - accuracy: 0.9489 - loss: 0.6105
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 53ms/step - accuracy: 0.9496 - loss: 0.6096 - val_accuracy: 0.9613 - val_loss: 0.5923
Epoch 4/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9519 - loss: 0.5785
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9496 - loss: 0.5748 - val_accuracy: 0.9677 - val_loss: 0.5634
Epoch 5/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9584 - loss: 0.5444
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9596 - loss: 0.5410 - val_accuracy: 0.9613 - val_loss: 0.5250
Epoch 6/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step - accuracy: 0.9486 - loss: 0.5045
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9496 - loss: 0.5039 - val_accuracy: 0.9613 - val_loss: 0.4859
Epoch 7/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9491 - loss: 0.4698
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9510 - loss: 0.4663 - val_accuracy: 0.9790 - val_loss: 0.4432
Epoch 8/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9741 - loss: 0.4254
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 53ms/step - accuracy: 0.9731 - loss: 0.4209 - val_accuracy: 0.9760 - val_loss: 0.3922
Epoch 9/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9702 - loss: 0.3761
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9664 - loss: 0.3731 - val_accuracy: 0.9783 - val_loss: 0.3395
Epoch 10/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 37ms/step - accuracy: 0.9715 - loss: 0.3277
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 53ms/step - accuracy: 0.9700 - loss: 0.3233 - val_accuracy: 0.9790 - val_loss: 0.2865
Epoch 11/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step - accuracy: 0.9688 - loss: 0.2819
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9706 - loss: 0.2741 - val_accuracy: 0.9772 - val_loss: 0.2355
Epoch 12/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step - accuracy: 0.9697 - loss: 0.2328
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9684 - loss: 0.2298 - val_accuracy: 0.9755 - val_loss: 0.1904
Epoch 13/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step - accuracy: 0.9681 - loss: 0.1935
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9681 - loss: 0.1894 - val_accuracy: 0.9790 - val_loss: 0.1514
Epoch 14/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9702 - loss: 0.1559
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9702 - loss: 0.1549 - val_accuracy: 0.9788 - val_loss: 0.1216
Epoch 15/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step - accuracy: 0.9701 - loss: 0.1326
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 49ms/step - accuracy: 0.9700 - loss: 0.1288 - val_accuracy: 0.9778 - val_loss: 0.1002
Epoch 16/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9681 - loss: 0.1152
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 47ms/step - accuracy: 0.9697 - loss: 0.1115 - val_accuracy: 0.9783 - val_loss: 0.0853
Epoch 17/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9688 - loss: 0.0988
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 54ms/step - accuracy: 0.9692 - loss: 0.0978 - val_accuracy: 0.9772 - val_loss: 0.0784
Epoch 18/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 36ms/step - accuracy: 0.9698 - loss: 0.0934
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9693 - loss: 0.0918 - val_accuracy: 0.9788 - val_loss: 0.0702
Epoch 19/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step - accuracy: 0.9686 - loss: 0.0862
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9677 - loss: 0.0857 - val_accuracy: 0.9785 - val_loss: 0.0666
Epoch 20/20

1/2 ━━━━━━━━━━━━━━━━━━━━ 0s 35ms/step - accuracy: 0.9683 - loss: 0.0798
2/2 ━━━━━━━━━━━━━━━━━━━━ 0s 48ms/step - accuracy: 0.9682 - loss: 0.0821 - val_accuracy: 0.9785 - val_loss: 0.0647

Plot training history#

dense_net.plot_model_history()
(Figure: training curves for accuracy, loss, val_accuracy, and val_loss)

Predict and visualize on a test signal#

dense_net.plot_prediction(signal=dataset.signals[0:1, :], threshold=0.4)
Predicted Region of Interest
<Figure size 1200x500 with 1 Axes>

Total running time of the script: (0 minutes 4.281 seconds)

Gallery generated by Sphinx-Gallery