diff --git a/bfasst/flows/analyze_dataset.py b/bfasst/flows/analyze_dataset.py new file mode 100644 index 00000000..b92bc258 --- /dev/null +++ b/bfasst/flows/analyze_dataset.py @@ -0,0 +1,48 @@ +"""Analyze dataset metrics.""" + +from pathlib import Path +import pathlib +from bfasst.flows.flow import FlowNoDesign +from bfasst.tools.dataset_metrics.accumulate_metrics import AccumulateMetrics +from bfasst.tools.dataset_metrics.graph_metrics import GraphMetrics + + +class AnalyzeDataset(FlowNoDesign): + """Analyze dataset metrics.""" + + def __init__(self, dataset): + # pylint: disable=duplicate-code + super().__init__() + self.dataset = Path(dataset) + + # only used for configuring ninja rule snippets + self.graph_metrics_default_tool = GraphMetrics(self, None, None) + self.accumulate_metrics_tool = AccumulateMetrics(self, None) + # pylint: enable=duplicate-code + + def create_build_snippets(self): + # get the size of the dataset + directories = [x for x in self.dataset.iterdir() if x.is_dir()] + iterations = len(directories) + pieces = [] + + for i in range(1, iterations + 1): + num = int(directories[i - 1].name.split("_")[-1]) + graph_metrics_tool = GraphMetrics( + self, directories[i - 1] / f"{directories[i - 1].name}.dump", num + ) + pieces.append(graph_metrics_tool.metrics_path) + graph_metrics_tool.create_build_snippets() + + AccumulateMetrics(self, pieces).create_build_snippets() + + @classmethod + def flow_build_dir_name(cls) -> str: + """Get the name of the build directory for this flow""" + return "dataset_metrics" + + def add_ninja_deps(self, deps): + super().add_ninja_deps(deps) + + def get_top_level_flow_path(self): + return pathlib.Path(__file__).resolve() diff --git a/bfasst/flows/flow_descriptions.yaml b/bfasst/flows/flow_descriptions.yaml index 53282896..a10cb3ad 100644 --- a/bfasst/flows/flow_descriptions.yaml +++ b/bfasst/flows/flow_descriptions.yaml @@ -156,4 +156,10 @@ flows: class: OpenTitan external_tools: - vivado - - opentitan \ No newline at end of file + - opentitan + +- name: AnalyzeDataset + description: Compute Metrics on an FPGA Circuit dataset for GNNs. 
+ module: analyze_dataset + class: AnalyzeDataset + \ No newline at end of file diff --git a/bfasst/paths.py b/bfasst/paths.py index 9dbeb27f..db5fe500 100644 --- a/bfasst/paths.py +++ b/bfasst/paths.py @@ -20,6 +20,8 @@ COMMON_TOOLS_PATH = TOOLS_PATH / "common" +DATASET_METRICS_TOOLS_PATH = TOOLS_PATH / "dataset_metrics" + REV_BIT_TOOLS_PATH = TOOLS_PATH / "rev_bit" NINJA_TRANSFORM_TOOLS_PATH = TOOLS_PATH / "transform" diff --git a/bfasst/tools/dataset_metrics/accumulate_metrics.py b/bfasst/tools/dataset_metrics/accumulate_metrics.py new file mode 100644 index 00000000..7869749c --- /dev/null +++ b/bfasst/tools/dataset_metrics/accumulate_metrics.py @@ -0,0 +1,46 @@ +"""Accumulate metrics from the graph_metrics tool.""" + +import chevron + +from bfasst.tools.tool import ToolBase +from bfasst.paths import BUILD_PATH, NINJA_BUILD_PATH, DATASET_METRICS_TOOLS_PATH, BFASST_UTILS_PATH + + +class AccumulateMetrics(ToolBase): + """Accumulate metrics from the graph_metrics tool.""" + + def __init__(self, flow, pieces): + super().__init__(flow) + self.pieces = pieces + self.build_path = BUILD_PATH / "dataset_metrics" + self.metrics_path = self.build_path / "master_metrics.log" + self.summary_stats = self.build_path / "summary_stats.log" + + self._init_outputs() + self.rule_snippet_path = ( + DATASET_METRICS_TOOLS_PATH / "accumulate_metrics_rules.ninja.mustache" + ) + + def create_build_snippets(self): + with open(DATASET_METRICS_TOOLS_PATH / "accumulate_metrics_build.ninja.mustache", "r") as f: + build = chevron.render( + f, + { + "metrics_file": self.metrics_path, + "summary_stats": self.summary_stats, + "aggregation_dir": self.build_path, + "pieces": self.pieces, + "accumulate_metrics_util": BFASST_UTILS_PATH / "accumulate_metrics.py", + }, + ) + + with open(NINJA_BUILD_PATH, "a") as f: + f.write(build) + + def _init_outputs(self): + self.outputs["metrics_path"] = self.metrics_path + self.outputs["summary_stats"] = self.summary_stats + + def add_ninja_deps(self, deps): + self._add_ninja_deps_default(deps, __file__) + deps.append(BFASST_UTILS_PATH / "accumulate_metrics.py") diff --git a/bfasst/tools/dataset_metrics/accumulate_metrics_build.ninja.mustache b/bfasst/tools/dataset_metrics/accumulate_metrics_build.ninja.mustache new file mode 100644 index 00000000..2b0b775c --- /dev/null +++ b/bfasst/tools/dataset_metrics/accumulate_metrics_build.ninja.mustache @@ -0,0 +1,4 @@ +build {{ metrics_file }} {{ summary_stats }}: accumulate_metrics {{ aggregation_dir }} | {{#pieces}}{{.}} {{/pieces}} {{ accumulate_metrics_util }} + metrics_file = {{ metrics_file }} + summary_stats = {{ summary_stats }} + diff --git a/bfasst/tools/dataset_metrics/accumulate_metrics_rules.ninja.mustache b/bfasst/tools/dataset_metrics/accumulate_metrics_rules.ninja.mustache new file mode 100644 index 00000000..2454a7eb --- /dev/null +++ b/bfasst/tools/dataset_metrics/accumulate_metrics_rules.ninja.mustache @@ -0,0 +1,4 @@ +rule accumulate_metrics + command = python {{ bfasst_path }}/bfasst/utils/accumulate_metrics.py $in -m $metrics_file -s $summary_stats + description = accumulate metrics from $in to produce master_metrics and summary_stats files + diff --git a/bfasst/tools/dataset_metrics/graph_metrics.py b/bfasst/tools/dataset_metrics/graph_metrics.py new file mode 100644 index 00000000..f00ebf9c --- /dev/null +++ b/bfasst/tools/dataset_metrics/graph_metrics.py @@ -0,0 +1,46 @@ +"""Create the rule and build snippets for computing gnn dataset metrics.""" + +import chevron + +from bfasst.tools.tool import ToolBase +from 
bfasst.paths import BUILD_PATH, NINJA_BUILD_PATH, DATASET_METRICS_TOOLS_PATH, BFASST_UTILS_PATH + + +class GraphMetrics(ToolBase): + """Create the rule and build snippets for computing gnn dataset metrics.""" + + def __init__( + self, + flow, + graph, + num, + ): + super().__init__(flow) + self.graph = graph + self.num = num + self.build_path = BUILD_PATH / "dataset_metrics" + self.metrics_path = self.build_path / f"metrics_{num}.log" + + self._init_outputs() + self.rule_snippet_path = DATASET_METRICS_TOOLS_PATH / "process_graph_rules.ninja.mustache" + + def create_build_snippets(self): + with open(DATASET_METRICS_TOOLS_PATH / "process_graph_build.ninja.mustache", "r") as f: + build = chevron.render( + f, + { + "output": self.metrics_path, + "graph": self.graph, + "process_graph_util": BFASST_UTILS_PATH / "process_graph.py", + }, + ) + + with open(NINJA_BUILD_PATH, "a") as f: + f.write(build) + + def _init_outputs(self): + self.outputs["metrics_path"] = self.metrics_path + + def add_ninja_deps(self, deps): + self._add_ninja_deps_default(deps, __file__) + deps.append(BFASST_UTILS_PATH / "process_graph.py") diff --git a/bfasst/tools/dataset_metrics/process_graph_build.ninja.mustache b/bfasst/tools/dataset_metrics/process_graph_build.ninja.mustache new file mode 100644 index 00000000..954d06ab --- /dev/null +++ b/bfasst/tools/dataset_metrics/process_graph_build.ninja.mustache @@ -0,0 +1,2 @@ +build {{ output }}: process_graph {{ graph }} | {{ process_graph_util }} + diff --git a/bfasst/tools/dataset_metrics/process_graph_rules.ninja.mustache b/bfasst/tools/dataset_metrics/process_graph_rules.ninja.mustache new file mode 100644 index 00000000..7bde2576 --- /dev/null +++ b/bfasst/tools/dataset_metrics/process_graph_rules.ninja.mustache @@ -0,0 +1,4 @@ +rule process_graph + command = python {{ bfasst_path }}/bfasst/utils/process_graph.py $in -o $out + description = compute metrics on $in and save them to $out + diff --git a/bfasst/utils/accumulate_metrics.py b/bfasst/utils/accumulate_metrics.py new file mode 100644 index 00000000..8461ffa7 --- /dev/null +++ b/bfasst/utils/accumulate_metrics.py @@ -0,0 +1,148 @@ +"""Accumulate metrics from graphs in a dataset after computing them for all graphs""" + +import argparse +import logging +import json +from pathlib import Path +import statistics + +logger = logging.getLogger(__name__) + + +def main(): + """Load the graph, convert to adj_list, and compute metrics.""" + # ArgParse + args = get_args() + + # Logging (for debug, don't use in parallel) + logging.basicConfig( + level=logging.DEBUG if args.verbose else logging.INFO, + format="%(asctime)s - %(levelname)s - %(message)s", + ) + + # Initialize the master dictionary + master_metrics_output = args.m if args.m else "master_metrics.log" + stats_summary_output = args.s if args.s else "summary_statistics.log" + + # Iterate through the files in the analysis directory + master_metrics = compute_master_metrics( + args.analysis_dir, master_metrics_output, stats_summary_output + ) + + # sort the values for each metric after merging + master_metrics = sort_metrics(master_metrics) + + # Compute the stats for each metric + stats_summary = get_stats_summary(master_metrics) + + # write master_metrics to a file + with open(master_metrics_output, "w") as f: + f.write(json.dumps(master_metrics, indent=4)) + + with open(stats_summary_output, "w") as f: + f.write(json.dumps(stats_summary, indent=4)) + + +def get_args(): + """Get the command line arguments.""" + parser = argparse.ArgumentParser(description="Compute 
metrics on a graph.") + parser.add_argument( + "analysis_dir", help="The path to the folder containing all analysis files for all graphs." + ) + parser.add_argument("-v", "--verbose", action="store_true", help="Enable debug logging.") + parser.add_argument("-m", help="The name of the metrics file to create") + parser.add_argument( + "-s", help="The name of the stats (5-num summary, mean, stddev) file to create" + ) + return parser.parse_args() + + +def compute_master_metrics(analysis_dir, master_metrics_output, stats_summary_output): + """Compute the master metrics from the analysis directory.""" + master_metrics = {} + for file in Path(analysis_dir).iterdir(): + if file.is_dir(): + continue + + if file.name in ( + master_metrics_output, + stats_summary_output, + # Skip the master_metrics and stats_summary files + # Even if the user has specified different names + # for this run + "master_metrics.log", + "summary_stats.log", + ): + continue + + logger.debug("Processing %s", file) + + with open(file, "r") as f: + graph_metrics = json.loads(f.readline()) + + for ip, metrics in graph_metrics.items(): + # Initialize the IP entry in the master dictionary if it doesn't exist + if ip not in master_metrics: + master_metrics[ip] = {} + + for metric, values in metrics.items(): + # Initialize the metric entry if it doesn't exist + if metric not in master_metrics[ip]: + master_metrics[ip][metric] = [] + + # Concatenate the lists + master_metrics[ip][metric].extend(values) + + return master_metrics + + +def sort_metrics(metrics): + """Sort the values for each metric in the dictionary.""" + for ip, _ in metrics.items(): + for metric in metrics[ip]: + metrics[ip][metric] = sorted(metrics[ip][metric]) + return metrics + + +def get_stats_summary(master_metrics): + """Compute the 5-number summary, mean, and standard deviation for each metric.""" + summary = {} + for ip, metrics in master_metrics.items(): + for metric, values in metrics.items(): + # Calculate statistics + if values: # Check if the list is not empty + min_val, first_quartile, median, third_quartile, max_val = five_number_summary( + values + ) + mean = sum(values) / len(values) + stddev = statistics.stdev(values) if len(values) > 1 else 0.0 + + # Prepare the summary dictionary + if ip not in summary: + summary[ip] = {} + + summary[ip][metric] = { + "min": min_val, + "Q1": first_quartile, + "median": median, + "Q3": third_quartile, + "max": max_val, + "mean": mean, + "stddev": stddev, + } + return summary + + +def five_number_summary(data): + """Compute the 5-number summary for the given data.""" + n = len(data) + min_val = data[0] + max_val = data[-1] + first_quartile = data[n // 4] + median = data[n // 2] + third_quartile = data[(3 * n) // 4] + return min_val, first_quartile, median, third_quartile, max_val + + +if __name__ == "__main__": + main() diff --git a/bfasst/utils/process_graph.py b/bfasst/utils/process_graph.py new file mode 100644 index 00000000..b46375e2 --- /dev/null +++ b/bfasst/utils/process_graph.py @@ -0,0 +1,467 @@ +"""Compute metrics on a single graph in a dataset.""" + +import argparse +from collections import defaultdict, deque +import logging +import os +import json + +logger = logging.getLogger(__name__) + + +def main(): + """Load the graph, convert to adj_list, and compute metrics.""" + # ArgParse + parser = argparse.ArgumentParser(description="Compute metrics on a graph.") + parser.add_argument("graph", help="The graph to compute metrics on.") + parser.add_argument("-v", "--verbose", action="store_true", help="Enable 
debug logging.") + parser.add_argument("-o", help="The name of the output file to create") + + parser.add_argument("--order", action="store_true", help="Compute the order of the graph.") + parser.add_argument("--size", action="store_true", help="Compute the size of the graph.") + parser.add_argument( + "--degree", action="store_true", help="Compute the average degree of the graph." + ) + parser.add_argument( + "--diameter", action="store_true", help="Compute the average diameter of the graph." + ) + parser.add_argument( + "--component_count", action="store_true", help="Compute the number of components." + ) + parser.add_argument( + "--global_clustering_coeff", + action="store_true", + help="Compute the global clustering coefficient of the graph.", + ) + parser.add_argument( + "--k_core", action="store_true", help="Compute the maximal k-core of the graph." + ) + parser.add_argument("--all", action="store_true", help="Compute all metrics.", default=True) + + args = parser.parse_args() + + # Logging (for debug, don't use in parallel) + logging.basicConfig( + level=logging.DEBUG if args.verbose else logging.INFO, + format="%(asctime)s - %(levelname)s - %(message)s", + ) + + component_nodes, component_edges = load_graph(args.graph) + + adj_lists = convert_to_adj_list(component_nodes, component_edges) + + # Compute metrics for each component + metrics_per_ip = compute_metrics_per_ip(adj_lists, args) + + # write metrics to a file + output = args.o if args.o else "metrics.log" + with open(os.path.abspath(output), "w") as f: + f.write(json.dumps(metrics_per_ip)) + + +def load_graph(graph): + """Load a graph from a file.""" + graph_path = os.path.abspath(graph) + + component_nodes = defaultdict(list) # {ip_inst: [node1, node2, ...]} + component_edges = defaultdict(list) # {ip_inst: [(node1, node2), ...]} + section = None # track the section: nodes or edges + + with open(graph_path, "r") as f: + for line in f: + line = line.strip() + + # Detect the beginning of a section + if line.startswith("(("): + if section is None: + section = "nodes" + else: + section = "edges" + line = line[1:].strip() # Remove the opening '(' + + # Detect the end of a section + if line == ")": + continue + + if not line: + continue # Skip empty lines + + if section == "nodes": + parts = line.replace('"', "").split() + node_id, label = parts[0], parts[2] + node_id = node_id.replace("(", "") + if "ip" not in label: + label = "fabric" + component_nodes[label].append(node_id) + + elif section == "edges": + node1, node2 = line.replace('"', "").replace("(", "").replace(")", "").split() + # get the label for both nodes + node1_label = find_label(node1, component_nodes) + node2_label = find_label(node2, component_nodes) + if node1_label == node2_label: + component_edges[node1_label].append((node1, node2)) + + return component_nodes, component_edges + + +def find_label(node, component_nodes): + """Find the label for a node.""" + for label, nodes in component_nodes.items(): + if node in nodes: + return label + return None + + +def convert_to_adj_list(component_nodes, component_edges): + """Convert the graph to adjacency lists.""" + adj_lists = {} + for label, nodes in component_nodes.items(): + adj_lists[label] = {} + for node in nodes: + adj_lists[label][node] = [] + + for label, edges in component_edges.items(): + for node1, node2 in edges: + adj_lists[label][node1].append(node2) + adj_lists[label][node2].append(node1) + + return adj_lists + + +def compute_metrics_per_ip(adj_lists, args): + """Compute metrics for each IP in the 
graph.""" + metrics_per_ip = {} + for label, adj_list in adj_lists.items(): + + # Compute components + components = compute_components(adj_list) + + # set up default entries + ip = get_ip_name_from_label(label) + if ip not in metrics_per_ip: + metrics_per_ip[ip] = { + "instance_order": [], + "component_orders": [], + "instance_size": [], + "component_sizes": [], + "avg_degree": [], + "avg_diameter": [], + "component_diameters": [], + "component_count": [], + "global_clustering_coeff": [], + "max_k_core": [], + } + + # Order + if args.all or args.order: + metrics_per_ip[ip]["instance_order"].append(len(adj_list)) + + # Component-wise order + if args.all or args.order: + component_orders = compute_component_orders(components) + metrics_per_ip[ip]["component_orders"].extend(component_orders) + + # Size + if args.all or args.size: + edge_count = compute_size(adj_list) + metrics_per_ip[ip]["instance_size"].append(edge_count) + + # Component-wise size + if args.all or args.size: + component_sizes = compute_component_sizes(components, adj_list) + metrics_per_ip[ip]["component_sizes"].extend(component_sizes) + + # Avg Degree + if args.all or args.degree: + avg_desgree = compute_average_degree(adj_list) + metrics_per_ip[ip]["avg_degree"].append(avg_desgree) + + # Avg Diameter + if args.all or args.diameter: + avg_diameter = compute_average_diameter(components, adj_list) + metrics_per_ip[ip]["avg_diameter"].append(avg_diameter) + + # Component Diameters + if args.all or args.diameter: + component_diameters = compute_component_diameters(components, adj_list) + metrics_per_ip[ip]["component_diameters"].extend(component_diameters) + + # Component Count + if args.all or args.component_count: + metrics_per_ip[ip]["component_count"].append(len(components)) + + # Global Clustering Coefficient + if args.all or args.global_clustering_coeff: + global_clustering = compute_global_clustering(adj_list) + metrics_per_ip[ip]["global_clustering_coeff"].append(global_clustering) + + # K-Core + if args.all or args.k_core: + max_k, _ = compute_k_core(adj_list) + metrics_per_ip[ip]["max_k_core"].append(max_k) + + # Debug (verbose flag only) + logger.debug("IP: %s", ip) + logger.debug("Component: %s", label) + logger.debug("Nodes: %s", len(adj_list)) + logger.debug("Edges: %s", edge_count) + logger.debug("") + + return metrics_per_ip + + +def compute_components(adj_list): + """Compute the components of a graph.""" + uf = UnionFind() + + for u in adj_list: + for v in adj_list[u]: + uf.union(u, v) + + components = {} + for node in adj_list: + root = uf.find(node) + if root not in components: + components[root] = set() + components[root].add(node) + + return components + + +def compute_component_orders(components): + """Compute the order of each component in a graph.""" + orders = [] + for component in components.values(): + orders.append(len(component)) + return orders + + +def compute_size(adj_list): + edge_count = 0 + for node in adj_list: + for _ in adj_list[node]: + edge_count += 1 + return edge_count // 2 + + +def compute_component_sizes(components, adj_list): + """Compute the size of each component in a graph.""" + sizes = [] + for component in components.values(): + edge_count = 0 + for node in component: + for neighbor in adj_list[node]: + if neighbor in component: + edge_count += 1 + sizes.append(edge_count // 2) + return sizes + + +def compute_average_diameter(components, adj_list): + """Compute the average diameter of a graph.""" + uf = UnionFind() + + for u in adj_list: + for v in adj_list[u]: + uf.union(u, v) 
+ + components = {} + for node in adj_list: + root = uf.find(node) + if root not in components: + components[root] = set() + components[root].add(node) + + diameters = [] + + for component in components.values(): + node = next(iter(component)) + u, _ = bfs_farthest(adj_list, node) + _, diameter = bfs_farthest(adj_list, u) + diameters.append(diameter) + + return sum(diameters) / len(diameters) if diameters else 0 + + +def compute_component_diameters(components, adj_list): + """Compute the diameter of each component in a graph.""" + diameters = [] + for component in components.values(): + node = next(iter(component)) + u, _ = bfs_farthest(adj_list, node) + _, diameter = bfs_farthest(adj_list, u) + diameters.append(diameter) + return diameters + + +def compute_average_degree(adj_list): + degrees = [] + for node in adj_list: + degrees.append(len(adj_list[node])) + return sum(degrees) / len(degrees) if degrees else 0 + + +class UnionFind: + """Union-find data structure.""" + + def __init__(self): + self.parent = {} + self.rank = {} + + def add(self, u): + if u not in self.parent: + self.parent[u] = u + self.rank[u] = 0 + + def find(self, u): + """Find the parent of a node.""" + # Ensure u is in the union find + self.add(u) + + # Path compression + if self.parent[u] != u: + self.parent[u] = self.find(self.parent[u]) + return self.parent[u] + + def union(self, u, v): + """Union two nodes.""" + self.add(u) + self.add(v) + pu, pv = self.find(u), self.find(v) + + if pv != pu: + if self.rank[pu] > self.rank[pv]: + self.parent[pv] = pu + elif self.rank[pv] > self.rank[pu]: + self.parent[pu] = pv + else: + self.parent[pv] = pu + self.rank[pu] += 1 + + +def bfs_farthest(adj_list, start_node): + """Breadth-first search to find the farthest node from a starting node.""" + queue = [(start_node, 0)] + visited = {start_node} + farthest_node = start_node + max_distance = 0 + + while queue: + node, distance = queue.pop(0) + if distance > max_distance: + max_distance = distance + farthest_node = node + + for neighbor in adj_list[node]: + if neighbor not in visited: + queue.append((neighbor, distance + 1)) + visited.add(neighbor) + + return farthest_node, max_distance + + +def compute_global_clustering(adj_list): + """Compute the global clustering coefficient of a graph.""" + closed_triplets = 0 + total_triplets = 0 + visited_pairs = set() + + for node in adj_list: + neighbors = set(adj_list[node]) + degree = len(neighbors) + + total_triplets += degree * (degree - 1) // 2 + + for neighbor in neighbors: + if (node, neighbor) in visited_pairs or (neighbor, node) in visited_pairs: + continue + + common_neighbors = neighbors.intersection(set(adj_list[neighbor])) + closed_triplets += len(common_neighbors) + visited_pairs.add((node, neighbor)) + + return (3 * closed_triplets) / total_triplets if total_triplets else 0 + + +def compute_k_core(adj_list): + """Compute the k-core of a graph.""" + degree = {node: len(neighbors) for node, neighbors in adj_list.items()} + max_k = 0 + k_core_subgraph = {} + + k = 1 + while True: + queue = deque(node for node, d in degree.items() if d <= k) + + while queue: + node = queue.popleft() + for neighbor in adj_list[node]: + if degree[neighbor] >= k: + degree[neighbor] -= 1 + if degree[neighbor] < k: + queue.append(neighbor) + degree[node] = 0 + + k_core = { + node: {neighbor for neighbor in neighbors if degree[neighbor] >= k} + for node, neighbors in adj_list.items() + if degree[node] >= k + } + + k += 1 + if k_core: + k_core_subgraph = k_core + max_k = k + else: + break + + return 
max_k, k_core_subgraph + + +def get_ip_name_from_label(label): + """Extract the IP name from a component label.""" + ip_name = "_".join(label.split("_")[2:]) + return ip_name if ip_name else label + + +def test_uf_components(): + """Ensure union find works.""" + adj_list = { + "A": ["B", "C"], + "B": ["A", "C"], + "C": ["A", "B"], + "D": ["E"], + "E": ["D"], + } + + components = compute_components(adj_list) + assert len(components) == 2 + logger.debug(components) + + +def test_k_core(): + """Ensure k-core works.""" + adj_list = { + "A": ["B", "C", "D", "E"], + "B": ["A", "C", "D", "E"], + "C": ["A", "B", "D", "F"], + "D": ["A", "B", "C", "J"], + "E": ["A", "B", "F", "I"], + "F": ["C", "E", "G", "H"], + "G": ["F"], + "H": ["F"], + "I": ["E"], + "J": ["D", "K", "L"], + "K": ["J"], + "L": ["J"], + } + + max_k, k_core = compute_k_core(adj_list) + assert max_k == 3 # A, B, C, D is a 3-core + logger.debug(k_core) + + +if __name__ == "__main__": + main() + test_uf_components() + test_k_core()
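
A minimal usage sketch of the dataset layout that AnalyzeDataset.create_build_snippets assumes; the directory names ("my_dataset", "design_0001") are hypothetical, but the naming rules (sub-directory name ends in an integer index and contains a "<dir name>.dump" graph) come directly from the flow above, which hands each dump to GraphMetrics and collects the resulting metrics_<index>.log paths for AccumulateMetrics.

# Sketch only: mirrors how AnalyzeDataset walks the dataset directory.
# "my_dataset" and "design_0001" are made-up names for illustration.
from pathlib import Path

dataset = Path("my_dataset")
for sub in (p for p in dataset.iterdir() if p.is_dir()):
    index = int(sub.name.split("_")[-1])   # e.g. "design_0001" -> 1
    dump_file = sub / f"{sub.name}.dump"   # graph file handed to process_graph.py
    print(f"dataset_metrics/metrics_{index}.log <- {dump_file}")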
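A small sketch of the JSON exchanged between process_graph.py and accumulate_metrics.py, assuming toy IP names and values; the structure ({ip: {metric: [values]}}) and the merge/summarize steps follow the utilities above, except that Q1/Q3 (computed index-wise by five_number_summary) are omitted here for brevity.

# Sketch only: two per-graph metrics dicts merged and summarized the way
# accumulate_metrics.py does it. "axi_gpio" and the numbers are illustrative.
import json
import statistics

graph_1 = {"axi_gpio": {"instance_order": [42], "avg_degree": [2.4]}}
graph_2 = {"axi_gpio": {"instance_order": [58], "avg_degree": [2.1]}}

master = {}
for per_graph in (graph_1, graph_2):
    for ip, metrics in per_graph.items():
        for metric, values in metrics.items():
            # Concatenate value lists across graphs, as compute_master_metrics does
            master.setdefault(ip, {}).setdefault(metric, []).extend(values)

summary = {}
for ip, metrics in master.items():
    for metric, values in metrics.items():
        values = sorted(values)
        summary.setdefault(ip, {})[metric] = {
            "min": values[0],
            "median": values[len(values) // 2],
            "max": values[-1],
            "mean": sum(values) / len(values),
            "stddev": statistics.stdev(values) if len(values) > 1 else 0.0,
        }

print(json.dumps(master, indent=4))    # shape of master_metrics.log
print(json.dumps(summary, indent=4))   # shape of summary_stats.log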