Skip to content

Commit

Permalink
add an assert validating the relative position bias keyword arguments (num_buckets must be less than max_distance)
Browse files Browse the repository at this point in the history
  • Loading branch information
lucidrains committed Feb 8, 2021
1 parent 64ee68b commit 17617af
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 1 deletion.
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
setup(
name = 'x-transformers',
packages = find_packages(exclude=['examples']),
version = '0.8.1',
version = '0.8.2',
license='MIT',
description = 'X-Transformers - Pytorch',
author = 'Phil Wang',
Expand Down
2 changes: 2 additions & 0 deletions x_transformers/x_transformers.py
Original file line number Diff line number Diff line change
Expand Up @@ -404,6 +404,8 @@ def __init__(

self.has_pos_emb = position_infused_attn or rel_pos_bias
self.pia_pos_emb = FixedPositionalEmbedding(dim) if position_infused_attn else None

assert rel_pos_num_buckets < rel_pos_max_distance, 'number of relative position buckets must be less than the relative position max distance'
self.rel_pos = RelativePositionBias(causal = causal, heads = heads, num_buckets = rel_pos_num_buckets, max_distance = rel_pos_max_distance) if rel_pos_bias else None

self.pre_norm = pre_norm and not residual_attn
Expand Down

0 comments on commit 17617af

Please sign in to comment.