From 3a9d10342f17a894df44405e476edb631f01d447 Mon Sep 17 00:00:00 2001
From: Waqar Shahid Qureshi
Date: Fri, 11 Aug 2023 18:24:02 +0100
Subject: [PATCH] For SwinV2, pass target_layer to ActivationsAndGradients
 unwrapped

For SwinV2 (and other Swin vision transformers), replace the constructor call
that wraps target_layer in a list with one that forwards it unchanged,
together with reshape_transform:

    self.activations_and_grads = ActivationsAndGradients(self.model, target_layer, reshape_transform)
---
 .../feature_factorization/deep_feature_factorization.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/pytorch_grad_cam/feature_factorization/deep_feature_factorization.py b/pytorch_grad_cam/feature_factorization/deep_feature_factorization.py
index b9db2c3e3..61a937f4a 100644
--- a/pytorch_grad_cam/feature_factorization/deep_feature_factorization.py
+++ b/pytorch_grad_cam/feature_factorization/deep_feature_factorization.py
@@ -53,9 +53,9 @@ def __init__(self,
                  ):
         self.model = model
         self.computation_on_concepts = computation_on_concepts
-        self.activations_and_grads = ActivationsAndGradients(
-            self.model, [target_layer], reshape_transform)
-
+        # For SwinV2, forward target_layer as given (the caller supplies the layer list)
+        self.activations_and_grads = ActivationsAndGradients(self.model, target_layer, reshape_transform)
+
     def __call__(self,
                  input_tensor: torch.Tensor,
                  n_components: int = 16):
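
Usage sketch for the patched constructor. This is not part of the patch: the
timm checkpoint name, the chosen target layer, the token-grid size, and the
dummy input are illustrative assumptions; adjust them to your own SwinV2 model.

import timm
import torch
from pytorch_grad_cam import DeepFeatureFactorization

def swin_reshape_transform(tensor, height=8, width=8):
    # Swin blocks emit tokens as (B, H*W, C) or (B, H, W, C) depending on the
    # timm version; normalise to the (B, C, H, W) layout the CAM code expects.
    if tensor.ndim == 3:
        tensor = tensor.reshape(tensor.size(0), height, width, tensor.size(2))
    return tensor.permute(0, 3, 1, 2)

model = timm.create_model("swinv2_tiny_window8_256", pretrained=False).eval()
target_layer = model.layers[-1].blocks[-1].norm1  # hypothetical layer choice

# With this patch the constructor forwards target_layer verbatim, so the caller
# wraps it in a list itself (ActivationsAndGradients expects a list of layers).
dff = DeepFeatureFactorization(model=model,
                               target_layer=[target_layer],
                               reshape_transform=swin_reshape_transform)

input_tensor = torch.randn(1, 3, 256, 256)  # dummy batch of one 256x256 image
concepts, batch_explanations = dff(input_tensor, n_components=8)
print(concepts.shape, len(batch_explanations))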