
Commit a50713c

Fix #2272
1 parent ebbe530 commit a50713c

File tree

1 file changed: +1 -1 lines changed

timm/models/vision_transformer.py

Lines changed: 1 addition & 1 deletion
@@ -629,7 +629,7 @@ def reset_classifier(self, num_classes: int, global_pool: Optional[str] = None):
             assert global_pool in ('', 'avg', 'avgmax', 'max', 'token', 'map')
             if global_pool == 'map' and self.attn_pool is None:
                 assert False, "Cannot currently add attention pooling in reset_classifier()."
-            elif global_pool != 'map ' and self.attn_pool is not None:
+            elif global_pool != 'map' and self.attn_pool is not None:
                 self.attn_pool = None  # remove attention pooling
             self.global_pool = global_pool
         self.head = nn.Linear(self.embed_dim, num_classes) if num_classes > 0 else nn.Identity()
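
Why the one-character fix matters: with the stray trailing space, the comparison global_pool != 'map ' evaluated to True even when the caller passed 'map', so an existing attention-pooling head was silently removed. Below is a minimal sketch (not part of the commit) of the behavior the fix restores; the model name is an assumption, chosen only because SigLIP ViT variants in timm are built with 'map' pooling.

import timm

# Hypothetical example model: a ViT variant built with attention pooling (global_pool='map').
model = timm.create_model('vit_base_patch16_siglip_224', pretrained=False)

# Swap in a new 10-class head while keeping the existing attention-pooling head.
# Before this fix, the 'map ' comparison never matched, so self.attn_pool was
# dropped here even though global_pool remained 'map'.
model.reset_classifier(num_classes=10, global_pool='map')
assert model.attn_pool is not None  # attention pooling head is preserved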

0 commit comments
