
Optimize log_validation in train_controlnet_flax #3110


Merged (1 commit, Apr 18, 2023)
examples/controlnet/train_controlnet_flax.py (35 changes: 19 additions & 16 deletions)
@@ -76,20 +76,11 @@ def image_grid(imgs, rows, cols):
     return grid


-def log_validation(controlnet, controlnet_params, tokenizer, args, rng, weight_dtype):
-    logger.info("Running validation... ")
+def log_validation(pipeline, pipeline_params, controlnet_params, tokenizer, args, rng, weight_dtype):
+    logger.info("Running validation...")

-    pipeline, params = FlaxStableDiffusionControlNetPipeline.from_pretrained(
-        args.pretrained_model_name_or_path,
-        tokenizer=tokenizer,
-        controlnet=controlnet,
-        safety_checker=None,
-        dtype=weight_dtype,
-        revision=args.revision,
-        from_pt=args.from_pt,
-    )
-    params = jax_utils.replicate(params)
-    params["controlnet"] = controlnet_params
+    pipeline_params = pipeline_params.copy()
[Review comment, Member, on the line above] Why do we need to copy the params?
+    pipeline_params["controlnet"] = controlnet_params

     num_samples = jax.device_count()
     prng_seed = jax.random.split(rng, jax.device_count())
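The review question above concerns the `.copy()` call. As a hedged illustration (plain dicts stand in for the replicated Flax param trees; the helper names below are hypothetical, not from the script), the shallow copy keeps the per-validation "controlnet" entry from being written back into the dict that main() keeps reusing:

def add_controlnet_without_copy(pipeline_params, controlnet_params):
    # Mutates the caller's dict: the "controlnet" key leaks back to the caller.
    pipeline_params["controlnet"] = controlnet_params
    return pipeline_params

def add_controlnet_with_copy(pipeline_params, controlnet_params):
    # Shallow copy first: the caller's dict stays untouched.
    pipeline_params = pipeline_params.copy()
    pipeline_params["controlnet"] = controlnet_params
    return pipeline_params

shared = {"unet": "unet_params", "vae": "vae_params"}
add_controlnet_without_copy(shared, "controlnet_at_step_100")
print("controlnet" in shared)  # True: the shared dict was modified

shared = {"unet": "unet_params", "vae": "vae_params"}
add_controlnet_with_copy(shared, "controlnet_at_step_100")
print("controlnet" in shared)  # False: the shared dict is unchanged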
@@ -121,7 +112,7 @@ def log_validation(controlnet, controlnet_params, tokenizer, args, rng, weight_dtype):
         images = pipeline(
             prompt_ids=prompt_ids,
             image=processed_image,
-            params=params,
+            params=pipeline_params,
             prng_seed=prng_seed,
             num_inference_steps=50,
             jit=True,
@@ -176,6 +167,7 @@ def save_model_card(repo_id: str, image_logs=None, base_model=str, repo_folder=None):
 - text-to-image
 - diffusers
 - controlnet
+- jax-diffusers-event
[Review comment, Member, on the `- jax-diffusers-event` line] Cool, we just need to remember this is specific for the event and remove later.

[Review comment, Collaborator, on the same line, with a suggested change deleting `- jax-diffusers-event`] think we can remove this line here and keep it in the script on the event repo

 inference: true
 ---
 """
@@ -800,6 +792,17 @@ def main():
         ]:
             controlnet_params[key] = unet_params[key]

+    pipeline, pipeline_params = FlaxStableDiffusionControlNetPipeline.from_pretrained(
+        args.pretrained_model_name_or_path,
+        tokenizer=tokenizer,
+        controlnet=controlnet,
+        safety_checker=None,
+        dtype=weight_dtype,
+        revision=args.revision,
+        from_pt=args.from_pt,
+    )
+    pipeline_params = jax_utils.replicate(pipeline_params)
+
     # Optimization
     if args.scale_lr:
         args.learning_rate = args.learning_rate * total_train_batch_size
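The hunk above is where the optimization lands: FlaxStableDiffusionControlNetPipeline.from_pretrained and jax_utils.replicate now run once in main() rather than on every validation pass. A small standalone sketch of what replicate does (assuming the usual flax.jax_utils behaviour; the toy arrays are placeholders):

import jax
import numpy as np
from flax import jax_utils

params = {"unet": np.zeros((4,)), "vae": np.ones((2,))}
replicated = jax_utils.replicate(params)

# Every leaf gains a leading device axis of size jax.local_device_count(),
# which is exactly the copy work log_validation no longer repeats per call.
print(jax.tree_util.tree_map(lambda x: x.shape, replicated))
# e.g. {'unet': (1, 4), 'vae': (1, 2)} on a single-device host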
@@ -1073,7 +1076,7 @@ def l2(xs):
                     and global_step % args.validation_steps == 0
                     and jax.process_index() == 0
                 ):
-                    _ = log_validation(controlnet, state.params, tokenizer, args, validation_rng, weight_dtype)
+                    _ = log_validation(pipeline, pipeline_params, state.params, tokenizer, args, validation_rng, weight_dtype)

                 if global_step % args.logging_steps == 0 and jax.process_index() == 0:
                     if args.report_to == "wandb":
@@ -1105,7 +1108,7 @@ def l2(xs):
         if args.validation_prompt is not None:
             if args.profile_validation:
                 jax.profiler.start_trace(args.output_dir)
-            image_logs = log_validation(controlnet, state.params, tokenizer, args, validation_rng, weight_dtype)
+            image_logs = log_validation(pipeline, pipeline_params, state.params, tokenizer, args, validation_rng, weight_dtype)
             if args.profile_validation:
                 jax.profiler.stop_trace()
         else: