updated tests to improve coverage
Coerulatus committed Dec 1, 2023
1 parent c0c0e5d commit cfbde34
Showing 5 changed files with 96 additions and 7 deletions.
24 changes: 23 additions & 1 deletion test/nn/hypergraph/test_hnhn_layer.py
@@ -23,7 +23,20 @@ def template_layer(self):
             incidence_1=incidence_1,
         )

-    def test_forward(self, template_layer):
+    @pytest.fixture
+    def template_layer2(self):
+        """Initialize and return an HNHN layer."""
+        self.in_channels = 5
+        self.hidden_channels = 8
+
+        return HNHNLayer(
+            in_channels=self.in_channels,
+            hidden_channels=self.hidden_channels,
+            incidence_1=None,
+            bias_init="xavier_normal",
+        )
+
+    def test_forward(self, template_layer, template_layer2):
         """Test the forward pass of the HNHN layer."""
         n_nodes, n_edges = template_layer.incidence_1.shape

@@ -33,6 +46,15 @@ def test_forward(self, template_layer):
         assert x_0_out.shape == (n_nodes, self.hidden_channels)
         assert x_1_out.shape == (n_edges, self.hidden_channels)

+        n_nodes = 10
+        n_edges = 20
+        incidence_1 = torch.randint(0, 2, (n_nodes, n_edges)).float()
+
+        x_0_out, x_1_out = template_layer2.forward(x_0, incidence_1)
+
+        assert x_0_out.shape == (n_nodes, self.hidden_channels)
+        assert x_1_out.shape == (n_edges, self.hidden_channels)
+
         return

     def test_compute_normalization_matrices(self, template_layer):
19 changes: 18 additions & 1 deletion test/nn/hypergraph/test_hypersage_layer.py
@@ -15,14 +15,24 @@ def hypersage_layer(self):
         out_channels = 30
         return HyperSAGELayer(in_channels, out_channels)

-    def test_forward(self, hypersage_layer):
+    @pytest.fixture
+    def hypersage_layer_alpha(self):
+        """Return a HyperSAGE layer."""
+        in_channels = 10
+        out_channels = 30
+        return HyperSAGELayer(in_channels, out_channels, alpha=1)
+
+    def test_forward(self, hypersage_layer, hypersage_layer_alpha):
         """Test the forward pass of the HyperSAGE layer."""
         x_2 = torch.randn(3, 10)
         incidence_2 = torch.tensor(
             [[1, 0], [0, 1], [1, 1]], dtype=torch.float32
         ).to_sparse()
         output = hypersage_layer.forward(x_2, incidence_2)
+        output2 = hypersage_layer_alpha.forward(x_2, incidence_2)
+
         assert output.shape == (3, 30)
+        assert output2.shape == (3, 30)

     def test_forward_with_invalid_input(self, hypersage_layer):
         """Test the forward pass of the HyperSAGE layer with invalid input."""
@@ -65,6 +75,13 @@ def test_update_sigmoid(self, hypersage_layer):
         assert torch.is_tensor(updated)
         assert updated.shape == (10, 20)

+    def test_update_invalid(self, hypersage_layer):
+        """Test the update function with update_func = "invalid"."""
+        hypersage_layer.update_func = "invalid"
+        inputs = torch.randn(10, 20)
+        with pytest.raises(RuntimeError):
+            hypersage_layer.update(inputs)
+
     def test_aggregation_invald(self, hypersage_layer):
         """Test the aggregation function with invalid mode."""
         x_messages = torch.zeros(3, 10)
35 changes: 32 additions & 3 deletions test/nn/hypergraph/test_unigcnii_layer.py
@@ -18,14 +18,36 @@ def unigcnii_layer(self):
             in_channels=in_channels, hidden_channels=in_channels, alpha=alpha, beta=beta
         )

-    def test_forward(self, unigcnii_layer):
+    @pytest.fixture
+    def unigcnii_layer2(self):
+        """Return a uniGCNII layer."""
+        in_channels = 10
+        alpha = 0.1
+        beta = 0.1
+        return UniGCNIILayer(
+            in_channels=in_channels,
+            hidden_channels=in_channels,
+            alpha=alpha,
+            beta=beta,
+            use_norm=True,
+        )
+
+    def test_forward(self, unigcnii_layer, unigcnii_layer2):
         """Test the forward pass."""
         n_nodes, in_channels = 3, 10
         x_0 = torch.randn(n_nodes, in_channels)
-        incidence_1 = torch.tensor([[1, 0], [1, 1], [0, 1]], dtype=torch.float32)
-        x_0, _ = unigcnii_layer.forward(x_0, incidence_1)
+        incidence_1 = torch.tensor(
+            [[1, 1, 0], [1, 1, 1], [0, 1, 1]], dtype=torch.float32
+        )
+        x_0, x_1 = unigcnii_layer.forward(x_0, incidence_1)

         assert x_0.shape == torch.Size([n_nodes, in_channels])
+        assert x_1.shape == torch.Size([3, in_channels])
+
+        x_0, x_1 = unigcnii_layer2.forward(x_0, incidence_1)
+
+        assert x_0.shape == torch.Size([n_nodes, in_channels])
+        assert x_1.shape == torch.Size([3, in_channels])

     def test_forward_with_skip(self):
         """Test the forward pass where alpha=1 and beta=0.
@@ -45,3 +67,10 @@ def test_forward_with_skip(self):
         x_0, _ = layer(x_0, incidence_1, x_skip)

         torch.testing.assert_close(x_0, x_skip, rtol=1e-4, atol=1e-4)
+
+    def test_reset_params(self, unigcnii_layer):
+        """Test reset parameters."""
+        unigcnii_layer.linear.weight.requires_grad = False
+        unigcnii_layer.linear.weight.fill_(0)
+        unigcnii_layer.reset_parameters()
+        assert torch.max(unigcnii_layer.linear.weight) > 0
13 changes: 12 additions & 1 deletion test/nn/hypergraph/test_unigin_layer.py
@@ -15,7 +15,13 @@ def UniGIN_layer(self):
         self.in_channels = 10
         return UniGINLayer(in_channels=self.in_channels)

-    def test_forward(self, UniGIN_layer):
+    @pytest.fixture
+    def UniGIN_layer2(self):
+        """Return a UniGIN layer."""
+        self.in_channels = 10
+        return UniGINLayer(in_channels=self.in_channels, use_norm=True)
+
+    def test_forward(self, UniGIN_layer, UniGIN_layer2):
         """Test the forward pass of the UniGIN layer."""
         n_nodes, n_edges = 2, 3
         incidence = torch.from_numpy(np.random.rand(n_nodes, n_edges)).to_sparse()
@@ -25,3 +31,8 @@ def test_forward(self, UniGIN_layer):

         assert x_0.shape == torch.Size([n_nodes, self.in_channels])
         assert x_1.shape == torch.Size([n_edges, self.in_channels])
+
+        x_0, x_1 = UniGIN_layer2.forward(x_0, incidence)
+
+        assert x_0.shape == torch.Size([n_nodes, self.in_channels])
+        assert x_1.shape == torch.Size([n_edges, self.in_channels])
12 changes: 11 additions & 1 deletion test/nn/hypergraph/test_unisage_layer.py
@@ -15,13 +15,23 @@ def uniSAGE_layer(self):
         out_channels = 30
         return UniSAGELayer(in_channels, out_channels)

-    def test_forward(self, uniSAGE_layer):
+    @pytest.fixture
+    def uniSAGE_layer2(self):
+        """Fixture for uniSAGE layer."""
+        in_channels = 10
+        out_channels = 30
+        return UniSAGELayer(in_channels, out_channels, use_norm=True)
+
+    def test_forward(self, uniSAGE_layer, uniSAGE_layer2):
         """Test forward pass."""
         x = torch.randn(3, 10)
         incidence = torch.tensor([[1, 1, 0], [1, 1, 1], [0, 1, 1]], dtype=torch.float32)
         x_0, x_1 = uniSAGE_layer.forward(x, incidence)
         assert x_0.shape == torch.Size([3, 30])
         assert x_1.shape == torch.Size([3, 30])
+        x_0, x_1 = uniSAGE_layer2.forward(x, incidence)
+        assert x_0.shape == torch.Size([3, 30])
+        assert x_1.shape == torch.Size([3, 30])

     def test_sum_aggregator(self):
         """Test sum aggregator."""