Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Script to combine generator and detector checkpoints into a single one for training #41

Open
wants to merge 3 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Empty file added src/scripts/__init__.py
Empty file.
50 changes: 50 additions & 0 deletions src/scripts/combine_checkpoints.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.


from pathlib import Path
from typing import Union

import torch


def combine_checkpoints(
    generator_checkpoint: Union[str, Path],
    detector_checkpoint: Union[str, Path],
    output_checkpoint: Union[str, Path],
):
    """Combine split generator and detector checkpoints into a single trainable one.

    Each input checkpoint is expected to be a dict with keys ``"xp.cfg"``
    (the training config) and ``"model"`` (a state dict). The output checkpoint
    has the same layout, with every generator key prefixed by ``generator.``
    and every detector key prefixed by ``detector.``.

    Args:
        generator_checkpoint: Path to the generator-only checkpoint.
        detector_checkpoint: Path to the detector-only checkpoint.
        output_checkpoint: Destination path for the combined checkpoint.
    """
    # map_location="cpu" lets checkpoints that were saved on GPU load on
    # CPU-only machines instead of raising a CUDA deserialization error.
    gen_ckpt = torch.load(generator_checkpoint, map_location="cpu")
    det_ckpt = torch.load(detector_checkpoint, map_location="cpu")

    combined_ckpt = {
        "xp.cfg": gen_ckpt["xp.cfg"],  # assuming the configs are identical
        "model": {},
    }

    # add generator layers with appropriate prefix
    for layer, weight in gen_ckpt["model"].items():
        combined_ckpt["model"][f"generator.{layer}"] = weight

    # add detector layers with appropriate prefix
    for layer, weight in det_ckpt["model"].items():
        combined_ckpt["model"][f"detector.{layer}"] = weight

    # Special case for 'msg_processor.msg_processor.weight': also store it
    # under the unprefixed key 'msg_processor.msg_processor.0.weight'.
    # NOTE(review): the key is deliberately left without the 'generator.'
    # prefix here — confirm against the combined model's expected layout.
    if "msg_processor.msg_processor.weight" in gen_ckpt["model"]:
        combined_ckpt["model"]["msg_processor.msg_processor.0.weight"] = gen_ckpt[
            "model"
        ]["msg_processor.msg_processor.weight"]

    torch.save(combined_ckpt, output_checkpoint)


if __name__ == "__main__":
    # CLI entry point via python-fire: positional/flag args map onto the
    # function's parameters, e.g.
    #   python combine_checkpoints.py GEN.pth DET.pth OUT.pth
    # fire is imported lazily so the module can be imported without it.
    import fire

    fire.Fire(combine_checkpoints)
99 changes: 99 additions & 0 deletions tests/test_combine_checkpoints.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,99 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.


import urllib
from pathlib import Path

import pytest
import torch
import torchaudio

from audioseal import AudioSeal
from audioseal.builder import (
AudioSealDetectorConfig,
AudioSealWMConfig,
create_detector,
create_generator,
)
from audioseal.models import AudioSealDetector, AudioSealWM
from scripts.combine_checkpoints import combine_checkpoints


@pytest.fixture
def ckpts_dir() -> Path:
    """Provide a scratch directory for checkpoint files, creating it if needed."""
    directory = Path("TMP")
    directory.mkdir(parents=True, exist_ok=True)
    return directory


@pytest.fixture
def generator_ckpt_path(ckpts_dir: Path) -> Path:
    """Write a standalone generator checkpoint to disk and return its path."""
    state_dict, config = AudioSeal.parse_model(
        "audioseal_wm_16bits",
        AudioSealWMConfig,
        nbits=16,
    )

    # Round-trip through the model so the saved state dict matches what a
    # freshly constructed generator produces.
    generator = create_generator(config)
    generator.load_state_dict(state_dict)

    path = ckpts_dir / "generator_checkpoint.pth"
    torch.save({"xp.cfg": config, "model": generator.state_dict()}, path)
    return path


@pytest.fixture
def detector_ckpt_path(ckpts_dir: Path) -> Path:
    """Write a standalone detector checkpoint to disk and return its path."""
    state_dict, config = AudioSeal.parse_model(
        "audioseal_detector_16bits",
        AudioSealDetectorConfig,
        nbits=16,
    )

    # Round-trip through the model so the saved state dict matches what a
    # freshly constructed detector produces.
    detector = create_detector(config)
    detector.load_state_dict(state_dict)

    path = ckpts_dir / "detector_checkpoint.pth"
    torch.save({"xp.cfg": config, "model": detector.state_dict()}, path)
    return path


def test_combine_checkpoints(
    generator_ckpt_path: Path, detector_ckpt_path: Path, ckpts_dir: Path
):
    """Check that combining prefixes every generator/detector key correctly.

    Combines the two fixture checkpoints, then verifies that each key of the
    generator (resp. detector) state dict appears in the combined state dict
    under a ``generator.`` (resp. ``detector.``) prefix.
    """
    combined_ckpt_path = ckpts_dir / "combined.pth"

    # try/finally so the scratch files are removed even when an assertion
    # fails — otherwise a failed run leaves TMP/ behind and breaks reruns.
    try:
        combine_checkpoints(generator_ckpt_path, detector_ckpt_path, combined_ckpt_path)

        assert combined_ckpt_path.exists()

        # map_location="cpu" so the test also runs on CPU-only CI machines.
        generator = torch.load(generator_ckpt_path, map_location="cpu")
        detector = torch.load(detector_ckpt_path, map_location="cpu")
        combined = torch.load(combined_ckpt_path, map_location="cpu")

        for key in generator["model"]:
            assert f"generator.{key}" in combined["model"]

        for key in detector["model"]:
            assert f"detector.{key}" in combined["model"]
    finally:
        # clean up (missing_ok guards against a failure before the file exists)
        combined_ckpt_path.unlink(missing_ok=True)
        generator_ckpt_path.unlink(missing_ok=True)
        detector_ckpt_path.unlink(missing_ok=True)
        ckpts_dir.rmdir()
Loading