MAINT: Remove outdated todos, open issue #211
ntfrgl committed Dec 22, 2023
1 parent 33bf65a commit f9f158a
Showing 1 changed file with 1 addition and 146 deletions.
147 changes: 1 addition & 146 deletions src/pyunicorn/core/network.py
@@ -17,20 +17,10 @@
multivariate data and generating time series surrogates.
"""

# general TODO:
# - find segfault problem in a.w. shortest path betweenness
# - rename aw... to nsi... (node splitting invariant)
# - add `typical_weight` argument in all nsi measures
# to implement calculation of "corrected" nsi measures(see paper)
# - implement Newman modularity and iterative division
# - treat type-related ambiguities more thoroughly
# (flatten(), list(...), astype(...) etc.)

#
# Import essential packages
#


import sys # performance testing
import time
from functools import wraps # helper function for decorators
@@ -1381,7 +1371,6 @@ def edge_list(self):
"""
return nz_coords(self.sp_A)

# TODO: deprecate this and rather use undirected_copy()
def undirected_adjacency(self):
"""
Return the adjacency matrix of the undirected version of the network
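
A minimal usage sketch for the undirected_adjacency() method touched by this hunk (illustrative only: the toy adjacency matrix and variable names are assumptions, and the exact return type, dense or sparse, is not visible in this excerpt):

```python
import numpy as np
from pyunicorn.core.network import Network

# Hypothetical directed chain 0 -> 1 -> 2
adjacency = np.array([[0, 1, 0],
                      [0, 0, 1],
                      [0, 0, 0]])
net = Network(adjacency, directed=True)

# Adjacency matrix of the undirected version of the network;
# depending on the implementation this may be a sparse matrix.
print(net.undirected_adjacency())
```
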
@@ -1554,12 +1543,7 @@ def set_node_attribute(self, attribute_name, values):
:type values: 1D Numpy array [node]
:arg values: The node attribute sequence.
"""
# TODO: add example

# Test whether the data vector has the same length as the number of
# nodes in the graph.
if len(values) == self.N:
# Add node property to igraph Graph object
self.graph.vs.set_attribute_values(attrname=attribute_name,
values=values)
else:
@@ -1578,7 +1562,6 @@ def node_attribute(self, attribute_name):
:rtype: 1D Numpy array [node]
:return: The node attribute sequence.
"""
# TODO: add example
return np.array(self.graph.vs.get_attribute_values(attribute_name))

def del_node_attribute(self, attribute_name):
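
The removed "# TODO: add example" markers are superseded by issue #211 (per the commit message); purely as an illustration, not part of this commit, the node-attribute API shown in the surrounding hunks can be exercised like this (the toy network, attribute name, and values are assumptions):

```python
import numpy as np
from pyunicorn.core.network import Network

net = Network(np.array([[0, 1, 1],
                        [1, 0, 0],
                        [1, 0, 0]]), directed=False)

# The value vector must have one entry per node (len(values) == net.N)
net.set_node_attribute("observable", values=np.array([0.3, 1.2, 0.7]))
print(net.node_attribute("observable"))  # -> array([0.3, 1.2, 0.7])
net.del_node_attribute("observable")
```
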
@@ -1587,15 +1570,12 @@ def del_node_attribute(self, attribute_name):
:arg str attribute_name: Name of node attribute to be deleted.
"""
# TODO: add example
del self.graph.vs[attribute_name]

#
# Methods working with link attributes
#

# TODO: verify whether return types are list or numpy array

def average_link_attribute(self, attribute_name):
"""
For each node, return the average of a link attribute
@@ -1605,7 +1585,6 @@ def average_link_attribute(self, attribute_name):
:rtype: 1d numpy array [node] of floats
"""
# TODO: add example
return self.link_attribute(attribute_name).mean(axis=1)

def link_attribute(self, attribute_name):
@@ -1617,8 +1596,6 @@ def link_attribute(self, attribute_name):
:rtype: square numpy array [node,node]
:return: Entry [i,j] is the attribute of the link from i to j.
"""
# TODO: add example
# TODO: test this for directed graphs
# Initialize weights array
weights = np.zeros((self.N, self.N))

@@ -1648,7 +1625,6 @@ def del_link_attribute(self, attribute_name):
:arg str attribute_name: name of link attribute to be deleted
"""
# TODO: add example
if attribute_name in self.cache['paths']:
self.clear_link_attribute(attribute_name)
del self.graph.es[attribute_name]
@@ -1670,9 +1646,6 @@ def set_link_attribute(self, attribute_name, values):
:type values: square numpy array [node,node]
:arg values: Entry [i,j] is the attribute of the link from i to j.
"""
# TODO: add example and sparse version
# TODO: test this for directed graphs
# Set link attribute in igraph
for e in self.graph.es:
e[attribute_name] = values[e.tuple]
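
Analogously, a hedged sketch of the link-attribute methods appearing in the hunks above (the attribute name "strength" and the toy matrices are assumptions):

```python
import numpy as np
from pyunicorn.core.network import Network

A = np.array([[0, 1, 1],
              [1, 0, 0],
              [1, 0, 0]])
net = Network(A, directed=False)

# Entry [i, j] of the value matrix is the attribute of the link from i to j
strength = np.array([[0.0, 2.0, 4.0],
                     [2.0, 0.0, 0.0],
                     [4.0, 0.0, 0.0]])
net.set_link_attribute("strength", strength)

print(net.link_attribute("strength"))          # square array [node, node]
print(net.average_link_attribute("strength"))  # row mean (zeros of absent links included)
net.del_link_attribute("strength")
```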

@@ -1704,7 +1677,6 @@ def degree(self, key=None):
else:
return self.outdegree(key)

# TODO: use directed example here and elsewhere
@cached_var('indegree')
def indegree(self, key=None):
"""
@@ -3364,36 +3336,6 @@ def worker(batch):
betw_w = worker(to_cy(targets, NODE))
return betw_w / w

def _eigenvector_centrality_slow(self, link_attribute=None):
"""
For each node, return its (weighted) eigenvector centrality.
This is the load on this node from the eigenvector corresponding to the
largest eigenvalue of the (weighted) adjacency matrix, normalized to a
maximum of 1.
:arg str link_attribute: Optional name of the link attribute to be used
as the links' weight. If None, links have weight 1. (Default: None)
:rtype: 1d numpy array [node] of floats
"""
if link_attribute == "topological":
print("WARNING: link_attribute='topological' is deprecated.\n"
+ "Use link_attribute=None instead.")
link_attribute = None

if link_attribute is None:
if self.silence_level <= 1:
print("Calculating topological eigenvector centrality...")

return np.array(self.graph.eigenvector_centrality(weights=None))
else:
if self.silence_level <= 1:
print("Calculating weighted eigenvector centrality...")

return np.array(self.graph.eigenvector_centrality(
weights=link_attribute))

# faster version of the above:
@cached_const('base', 'ev centrality', 'eigenvector centrality')
def eigenvector_centrality(self):
"""
@@ -3509,7 +3451,7 @@ def closeness(self, link_attribute=None):
as the links' length. If None, links have length 1. (Default: None)
:rtype: 1d numpy array [node] of floats between 0 and 1
"""
# TODO: check and describe behaviour for unconnected networks.
# TODO: check and describe behaviour for unconnected networks
if link_attribute == "topological":
print("WARNING: link_attribute='topological' is deprecated.\n"
+ "Use link_attribute=None instead.")
@@ -3739,92 +3681,6 @@ def arenas_betweenness(self):

return arenas_betweenness

# TODO: remove this slow version after regression test:
def _arenas_betweenness_slow(self):
print("WARNING: _arenas_betweenness_slow() is deprecated!")

t0 = time.time()

# Initialize the array to hold random walk betweenness
awRandomWalkBetweenness = np.zeros(self.N)

# Random walk betweenness has to be calculated for each component
# separately. Therefore get different components of the graph first
components = self.graph.connected_components()

# Print giant component size
if self.silence_level <= 1:
print(" (giant component size: "
+ str(components.giant().vcount()) + " ("
+ str(components.giant().vcount()
/ float(self.graph.vcount())) + "))")

for i, comp in enumerate(components):
# If the component has size 1, set random walk betweenness to zero
if len(comp) == 1:
awRandomWalkBetweenness[comp[0]] = 0
# For larger components, continue with the calculation
else:
# Get the subgraph corresponding to component i
subgraph = components.subgraph(i)

# Get the subgraph adjacency matrix
adjacency = np.array(subgraph.get_adjacency(type=2).data)

# Get the list of vertex numbers in the subgraph
vertexList = comp

# Extract corresponding area weight vector:
aw = np.zeros(len(vertexList))
for j, vs in enumerate(vertexList):
aw[j] = self.node_weights[vs]

# Generate a Network object representing the subgraph
subnetwork = Network(adjacency, directed=False)

# Get the number of nodes of the subgraph (the component size)
nNodes = subnetwork.N

# Initialize the RWB array
rwb = np.zeros(nNodes)

# Get the subnetworks degree sequence
awDegreeSequence = subnetwork.nsi_degree()

# Clean up
del subgraph, subnetwork

# Get the pMatrix that is modified and inverted
Identity = np.identity(nNodes)
Ap = adjacency + Identity
pMatrix = np.diag(1/awDegreeSequence).dot(Ap).dot(np.diag(aw))

for k in range(nNodes):
# For k and each neighbour of it, set the corresponding
# row of the pMatrix to zero to account for the absorption
# of random walkers at their destination
mask = 1-Ap[k, :]
pMk = pMatrix*(mask.reshape((nNodes, 1)))

# Calculate the b^k matrix
bMatrix = np.dot(np.linalg.inv(Identity-pMk), pMk)

# Perform the summation over source node i
rwb += aw[k] * np.dot(aw.reshape((1, self.N)),
bMatrix).flatten() * mask

rwb /= aw

# Copy results into randomWalkBetweennessArray at the correct
# positions
for j, vs in enumerate(vertexList):
awRandomWalkBetweenness[vs] = rwb[j]

if self.silence_level <= 1:
print("...took", time.time()-t0, "seconds")

return awRandomWalkBetweenness

# parallelized main loop
@staticmethod
def _mpi_nsi_arenas_betweenness(
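
The removed _arenas_betweenness_slow() builds, for every absorbing target node k, the fundamental-matrix product B = (I - P_k)^(-1) P_k of an absorbing random walk and accumulates the resulting visit loads. A standalone sketch of that single step (the transition matrix is assumed, and for brevity only the target row is zeroed, whereas the removed code also masks the target's neighbours):

```python
import numpy as np

# Assumed toy walk matrix on 3 nodes
P = np.array([[0.0, 0.5, 0.5],
              [0.5, 0.0, 0.5],
              [0.5, 0.5, 0.0]])

k = 2                    # absorbing target node
P_k = P.copy()
P_k[k, :] = 0.0          # walkers stop once they reach k

# Fundamental-matrix step: B = (I - P_k)^(-1) P_k
B = np.linalg.inv(np.eye(len(P)) - P_k) @ P_k
print(B)                 # expected transition counts before absorption at k
```
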
@@ -4407,7 +4263,6 @@ def nsi_global_efficiency(self):
:rtype: float
"""
# TODO: check results of examples!
w = self.node_weights
# Set path lengths on diagonal to 1
nsi_dist = self.path_lengths() + np.identity(self.N)
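
A brief usage sketch for nsi_global_efficiency(), the method this final hunk touches (illustrative only: the toy network is an assumption, and the n.s.i. measure uses net.node_weights, as the retained line above shows):

```python
import numpy as np
from pyunicorn.core.network import Network

net = Network(np.array([[0, 1, 1],
                        [1, 0, 0],
                        [1, 0, 0]]), directed=False)
print(net.nsi_global_efficiency())  # a single float, per the docstring
```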
