Commit 2e74ed0

this should always be on to save beginners from weight decay issues
lucidrains committed Jun 27, 2024
1 parent 044a62f commit 2e74ed0
Showing 2 changed files with 3 additions and 3 deletions.
setup.py (1 addition, 1 deletion)
@@ -3,7 +3,7 @@
 setup(
   name = 'x-transformers',
   packages = find_packages(exclude=['examples']),
-  version = '1.31.1',
+  version = '1.31.3',
   license='MIT',
   description = 'X-Transformers - Pytorch',
   author = 'Phil Wang',
x_transformers/x_transformers.py (2 additions, 2 deletions)
@@ -1234,7 +1234,7 @@ def __init__(
         use_adaptive_layernorm = False,
         use_adaptive_rmsnorm = False,
         use_adaptive_layerscale = False, # paired with use_adaptive_layernorm for ada-ln-zero from DiT paper
-        norm_add_unit_offset = False,
+        norm_add_unit_offset = True,
         dim_condition = None,
         adaptive_condition_mlp = False,
         adaptive_condition_mlp_expansion = 4,
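
For context on the flipped default: with `norm_add_unit_offset` on, the norm's learned gain starts at zero and is applied as `1 + gamma`. If an optimizer mistakenly applies weight decay to norm parameters (a common beginner oversight, and the "weight decay issues" the commit message refers to), decay then pulls the effective scale toward 1, the identity, rather than toward 0, which would silently shrink activations. A minimal sketch of the idea, assuming the usual `(1 + gamma)` parameterization rather than quoting the library's exact code:

import torch
import torch.nn.functional as F
from torch import nn

class RMSNorm(nn.Module):
    def __init__(self, dim, unit_offset = True):
        super().__init__()
        self.unit_offset = unit_offset
        # with the unit offset, gamma starts at 0 and the applied scale is (1 + gamma),
        # so weight decay pulls the scale toward 1; without it, gamma starts at 1 and
        # weight decay pulls the scale itself toward 0
        init = torch.zeros(dim) if unit_offset else torch.ones(dim)
        self.gamma = nn.Parameter(init)

    def forward(self, x):
        # rms-normalize, then apply the learned scale
        normed = F.normalize(x, dim = -1) * (x.shape[-1] ** 0.5)
        scale = self.gamma + 1. if self.unit_offset else self.gamma
        return normed * scale
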
@@ -1403,7 +1403,7 @@ def __init__(

         self.post_branch_fn_needs_condition = post_branch_fn_needs_condition

-        if not post_branch_fn_needs_condition and norm_add_unit_offset:
+        if exists(post_branch_fn) and not post_branch_fn_needs_condition and norm_add_unit_offset:
             post_branch_fn = partial(post_branch_fn, unit_offset = 1.)

         # setup mlp for conditioning
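
The second change guards the new default: `functools.partial` raises `TypeError: the first argument must be callable` at construction when handed `None`, so with `norm_add_unit_offset = True` by default, any configuration that leaves `post_branch_fn` unset (no adaptive layerscale, for instance) would have crashed on init. A self-contained sketch of the failure mode and the fix; `exists` is assumed to be the usual `val is not None` helper:

from functools import partial

def exists(val):
    # x-transformers-style helper
    return val is not None

post_branch_fn = None                  # common case: no adaptive layerscale configured
post_branch_fn_needs_condition = False
norm_add_unit_offset = True            # the new default from this commit

# before the fix, this branch ran with post_branch_fn = None, and
# partial(None, unit_offset = 1.) raised a TypeError at construction
if exists(post_branch_fn) and not post_branch_fn_needs_condition and norm_add_unit_offset:
    post_branch_fn = partial(post_branch_fn, unit_offset = 1.)
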
