Re-use util functions in graphstats
DanielSeemaier committed Sep 11, 2023
1 parent 1edc9c8 · commit 9ba2227
Showing 1 changed file with 27 additions and 39 deletions.
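Summary of the change: the tool's local MPI helpers are dropped in favor of the GetCommRank/GetCommSize utilities that already exist in KaGen, the remaining free functions are renamed to the utility naming style, and the Statistics struct is hoisted above its first use. As a rough sketch of what the reused utilities do, modeled on the deleted local helpers rather than on KaGen's actual utility header (whose exact signatures and return type may differ):

#include <mpi.h>

// Minimal stand-ins for the reused utilities; they mirror the deleted
// get_rank()/get_size() helpers. As in the diff below, the real
// GetCommRank/GetCommSize take the communicator as a parameter.
// (KaGen uses PEID instead of int; the removed helpers pass a PEID
// directly to MPI, so the type is int-compatible.)
int GetCommRank(MPI_Comm comm) {
    int rank;
    MPI_Comm_rank(comm, &rank);
    return rank;
}

int GetCommSize(MPI_Comm comm) {
    int size;
    MPI_Comm_size(comm, &size);
    return size;
}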
66 changes: 27 additions & 39 deletions app/tools/graphstats.cpp
@@ -13,21 +13,18 @@
 
 using namespace kagen;
 
-PEID get_rank() {
-    PEID rank;
-    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
-    return rank;
-}
-
-PEID get_size() {
-    PEID size;
-    MPI_Comm_size(MPI_COMM_WORLD, &size);
-    return size;
-}
+struct Statistics {
+    std::string name;
+    SInt n;
+    SInt m;
+    SInt min_deg;
+    LPFloat avg_deg;
+    SInt max_deg;
+};
 
-Graph load_graph(const PGeneratorConfig& config) {
-    const PEID rank = get_rank();
-    const PEID size = get_size();
+Graph LoadGraph(const PGeneratorConfig& config) {
+    const PEID rank = GetCommRank(MPI_COMM_WORLD);
+    const PEID size = GetCommSize(MPI_COMM_WORLD);
 
     FileGraphFactory factory;
     const auto normalized_config = factory.NormalizeParameters(config, rank, size, false);
@@ -37,16 +34,7 @@ Graph load_graph(const PGeneratorConfig& config) {
     return loader->Take();
 }
 
-struct Statistics {
-    std::string name;
-    SInt n;
-    SInt m;
-    SInt min_deg;
-    LPFloat avg_deg;
-    SInt max_deg;
-};
-
-void print_csv_header() {
+void PrintHeader() {
     std::cout << "Graph,";
     std::cout << "N,";
     std::cout << "M,";
@@ -56,7 +44,7 @@ void print_csv_header() {
     std::cout << std::endl;
 }
 
-void print_csv_row(const Statistics& stats) {
+void PrintRow(const Statistics& stats) {
     std::cout << stats.name << ",";
     std::cout << stats.n << ",";
     std::cout << stats.m << ",";
@@ -66,24 +54,24 @@ void print_csv_row(const Statistics& stats) {
     std::cout << std::endl;
 }
 
-std::string extract_filename(const std::string& filename) {
+std::string ExtractFilename(const std::string& filename) {
     const auto pos = filename.find_last_of('/');
     if (pos == std::string::npos) {
         return filename;
     }
     return filename.substr(pos + 1);
 }
 
-std::string strip_extension(const std::string& filename) {
+std::string StripExtension(const std::string& filename) {
     const auto pos = filename.find_last_of('.');
     if (pos == std::string::npos) {
         return filename;
     }
     return filename.substr(0, pos);
 }
 
-Statistics generate_internal(const PGeneratorConfig& config) {
-    Graph graph = load_graph(config);
+Statistics GenerateInternal(const PGeneratorConfig& config) {
+    Graph graph = LoadGraph(config);
 
     Statistics stats;
     stats.n = FindNumberOfGlobalNodes(graph.vertex_range, MPI_COMM_WORLD);
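ExtractFilename and StripExtension are pure string helpers, fully visible in the hunk above; a small self-contained sketch of how they compose to produce the CSV graph name (the input path is made up for illustration):

#include <iostream>
#include <string>

// Same logic as the ExtractFilename/StripExtension helpers above,
// repeated here so the example compiles on its own.
std::string ExtractFilename(const std::string& filename) {
    const auto pos = filename.find_last_of('/');
    return pos == std::string::npos ? filename : filename.substr(pos + 1);
}

std::string StripExtension(const std::string& filename) {
    const auto pos = filename.find_last_of('.');
    return pos == std::string::npos ? filename : filename.substr(0, pos);
}

int main() {
    const std::string path = "graphs/rgg2d.metis"; // hypothetical input path
    std::cout << ExtractFilename(path) << '\n';                 // prints: rgg2d.metis
    std::cout << StripExtension(ExtractFilename(path)) << '\n'; // prints: rgg2d
    return 0;
}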
@@ -97,8 +85,8 @@ Statistics generate_internal(const PGeneratorConfig& config) {
     return stats;
 }
 
-Statistics generate_external(const PGeneratorConfig& config, const int num_chunks) {
-    if (get_size() > 1) {
+Statistics GenerateExternal(const PGeneratorConfig& config, const int num_chunks) {
+    if (GetCommSize(MPI_COMM_WORLD) > 1) {
         std::cerr << "Error: external statistics generation is only supported for a single MPI process\n";
         std::exit(1);
     }
@@ -164,8 +152,8 @@ int main(int argc, char* argv[]) {
     CLI11_PARSE(app, argc, argv);
 
     // Catch special case: only print CSV header line
-    if ((do_header_only || !do_no_header) && get_rank() == ROOT) {
-        print_csv_header();
+    if ((do_header_only || !do_no_header) && GetCommRank(MPI_COMM_WORLD) == ROOT) {
+        PrintHeader();
     }
     if (do_header_only) {
         return MPI_Finalize();
@@ -176,18 +164,18 @@
 
     Statistics stats;
     if (num_chunks == 1) {
-        stats = generate_internal(config);
+        stats = GenerateInternal(config);
     } else {
-        stats = generate_external(config, num_chunks);
+        stats = GenerateExternal(config, num_chunks);
     }
 
-    stats.name = extract_filename(config.input_graph.filename);
+    stats.name = ExtractFilename(config.input_graph.filename);
     if (do_strip_extension) {
-        stats.name = strip_extension(stats.name);
+        stats.name = StripExtension(stats.name);
     }
 
-    if (get_rank() == ROOT) {
-        print_csv_row(stats);
+    if (GetCommRank(MPI_COMM_WORLD) == ROOT) {
+        PrintRow(stats);
     }
 }
 
