Skip to content

Commit

Permalink
Add generator rules to regenerate build.ninja files.
Browse files Browse the repository at this point in the history
  • Loading branch information
dillof committed Jan 1, 2024
1 parent d407823 commit 988c034
Show file tree
Hide file tree
Showing 9 changed files with 140 additions and 87 deletions.
5 changes: 5 additions & 0 deletions src/Bindings.cc
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,11 @@ Bindings::Bindings(Tokenizer& tokenizer) {
}
}

// Build the bindings table from a flat list of variables.
// A later duplicate name replaces an earlier one, matching map assignment semantics.
Bindings::Bindings(const std::vector<Variable>& variable_list) {
    for (const auto& entry : variable_list) {
        variables.insert_or_assign(entry.name, entry);
    }
}

void Bindings::print(std::ostream& stream, const std::string& indent) const {
for (auto& pair : *this) {
Expand Down
1 change: 1 addition & 0 deletions src/Bindings.h
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ class Bindings {
public:
Bindings() = default;
explicit Bindings(Tokenizer& tokenizer);
explicit Bindings(const std::vector<Variable>& variable_list);

void print(std::ostream& stream, const std::string& indent) const;
void process(const File& file);
Expand Down
3 changes: 2 additions & 1 deletion src/Build.h
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,7 @@ class Build {
public:
Build() = default;
explicit Build(Tokenizer& tokenizer);
// Construct a build statement directly from its parts (rule name, outputs, inputs,
// bindings) instead of parsing; used for synthesized builds such as the generator build.
Build(std::string rule_name, Text outputs, Text inputs, Bindings bindings): rule_name{std::move(rule_name)}, outputs{std::move(outputs)}, inputs{std::move(inputs)}, bindings{std::move(bindings)} {}

void process(const File& file);
void process_outputs(const File& file);
Expand All @@ -50,7 +51,7 @@ class Build {
[[nodiscard]] std::unordered_set<std::string> get_outputs() const;

private:
const Rule* rule = nullptr;
const Rule* rule{};
std::string rule_name;
Text outputs;
Text inputs;
Expand Down
199 changes: 116 additions & 83 deletions src/File.cc
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,22 @@ File::File(const std::filesystem::path& filename, const std::filesystem::path& b
}

void File::process() {
if (is_top()) {
rules["fast-ninja"] = Rule("fast-ninja", Bindings({
Variable("command", false, Text{std::vector<Text::Element>{
Text::Element{Text::ElementType::WORD, "fast-ninja"},
Text::Element{Text::ElementType::WHITESPACE, " "},
Text::Element{Text::ElementType::VARIABLE, "top_source_directory"}
}}),
Variable("generator", false, Text("1"))
}));
auto outputs = Text{};
auto inputs = Text{};
add_generator_build(outputs, inputs);

builds.emplace_back("fast-ninja", outputs, inputs, Bindings{});
}

for (auto& build : builds) {
build.process_outputs(*this);
auto current_outputs = build.get_outputs();
Expand Down Expand Up @@ -99,47 +115,45 @@ const Variable* File::find_variable(const std::string& name) const {
}

void File::create_output() const {
{
std::filesystem::create_directories(build_directory);
auto stream = std::ofstream(build_filename);
std::filesystem::create_directories(build_directory);
auto stream = std::ofstream(build_filename);

if (stream.fail()) {
throw Exception("can't create output '%s'", build_filename.c_str());
}
if (stream.fail()) {
throw Exception("can't create output '%s'", build_filename.c_str());
}

auto top_file = this;
while (top_file->previous) {
top_file = top_file->previous;
}
auto top_file = this;
while (!top_file->is_top()) {
top_file = top_file->previous;
}

stream << "top_source_directory = " << top_file->source_directory.string() << std::endl;
stream << "source_directory = " << source_directory.string() << std::endl;
stream << "top_build_directory = " << top_file->build_directory.string() << std::endl;
stream << "build_directory = " << build_directory.string() << std::endl;
stream << "top_source_directory = " << top_file->source_directory.string() << std::endl;
stream << "source_directory = " << source_directory.string() << std::endl;
stream << "top_build_directory = " << top_file->build_directory.string() << std::endl;
stream << "build_directory = " << build_directory.string() << std::endl;

if (!bindings.empty()) {
stream << std::endl;
bindings.print(stream, "");
}
if (!bindings.empty()) {
stream << std::endl;
bindings.print(stream, "");
}

for (auto& pair : rules) {
pair.second.print(stream);
}
for (auto& pair : rules) {
pair.second.print(stream);
}

for (auto& build : builds) {
build.print(stream);
}
for (auto& build : builds) {
build.print(stream);
}

if (!defaults.empty()) {
stream << std::endl;
stream << "default " << defaults << std::endl;
}
if (!defaults.empty()) {
stream << std::endl;
stream << "default " << defaults << std::endl;
}

if (!subninjas.empty()) {
stream << std::endl;
for (auto& subninja : subninjas) {
stream << "subninja " << replace_extension(subninja, "ninja").string() << std::endl;
}
if (!subninjas.empty()) {
stream << std::endl;
for (auto& subninja : subninjas) {
stream << "subninja " << replace_extension(subninja, "ninja").string() << std::endl;
}
}

Expand All @@ -152,67 +166,68 @@ void File::parse(const std::filesystem::path& filename) {
auto tokenizers = std::vector<Tokenizer>{};
tokenizers.emplace_back(filename);

while (!tokenizers.empty()) {
auto& tokenizer = tokenizers.back();
while (!tokenizers.empty()) {
auto& tokenizer = tokenizers.back();

try {
const auto token = tokenizer.next();
try {
const auto token = tokenizer.next();

switch (token.type) {
case Tokenizer::TokenType::END:
tokenizers.pop_back();
break;
switch (token.type) {
case Tokenizer::TokenType::END:
tokenizers.pop_back();
break;

case Tokenizer::TokenType::NEWLINE:
case Tokenizer::TokenType::SPACE:
break;

case Tokenizer::TokenType::BUILD:
parse_build(tokenizer);
case Tokenizer::TokenType::NEWLINE:
case Tokenizer::TokenType::SPACE:
break;

case Tokenizer::TokenType::DEFAULT:
parse_default(tokenizer);
break;
case Tokenizer::TokenType::BUILD:
parse_build(tokenizer);
break;

case Tokenizer::TokenType::INCLUDE: {
auto name = tokenizer.expect(Tokenizer::TokenType::WORD, Tokenizer::Skip::SPACE);
tokenizers.emplace_back(name.string());
break;
}
case Tokenizer::TokenType::DEFAULT:
parse_default(tokenizer);
break;

case Tokenizer::TokenType::POOL:
parse_pool(tokenizer);
case Tokenizer::TokenType::INCLUDE: {
auto name = tokenizer.expect(Tokenizer::TokenType::WORD, Tokenizer::Skip::SPACE);
includes.insert(name.string());
tokenizers.emplace_back(name.string());
break;

case Tokenizer::TokenType::RULE:
parse_rule(tokenizer);
break;

case Tokenizer::TokenType::SUBNINJA:
parse_subninja(tokenizer);
break;

case Tokenizer::TokenType::WORD:
parse_assignment(tokenizer, token.value);
break;

case Tokenizer::TokenType::ASSIGN:
case Tokenizer::TokenType::ASSIGN_LIST:
case Tokenizer::TokenType::BEGIN_SCOPE:
case Tokenizer::TokenType::COLON:
case Tokenizer::TokenType::END_SCOPE:
case Tokenizer::TokenType::IMPLICIT_DEPENDENCY:
case Tokenizer::TokenType::ORDER_DEPENDENCY:
case Tokenizer::TokenType::VALIDATION_DEPENDENCY:
case Tokenizer::TokenType::VARIABLE_REFERENCE:
throw Exception("invalid token");
}
} catch (Exception& ex) {
std::cerr << tokenizer.file_name().string() << ":" << tokenizer.current_line_number() << ": " << ex.what() << std::endl;
throw Exception();

case Tokenizer::TokenType::POOL:
parse_pool(tokenizer);
break;

case Tokenizer::TokenType::RULE:
parse_rule(tokenizer);
break;

case Tokenizer::TokenType::SUBNINJA:
parse_subninja(tokenizer);
break;

case Tokenizer::TokenType::WORD:
parse_assignment(tokenizer, token.value);
break;

case Tokenizer::TokenType::ASSIGN:
case Tokenizer::TokenType::ASSIGN_LIST:
case Tokenizer::TokenType::BEGIN_SCOPE:
case Tokenizer::TokenType::COLON:
case Tokenizer::TokenType::END_SCOPE:
case Tokenizer::TokenType::IMPLICIT_DEPENDENCY:
case Tokenizer::TokenType::ORDER_DEPENDENCY:
case Tokenizer::TokenType::VALIDATION_DEPENDENCY:
case Tokenizer::TokenType::VARIABLE_REFERENCE:
throw Exception("invalid token");
}
} catch (Exception& ex) {
std::cerr << tokenizer.file_name().string() << ":" << tokenizer.current_line_number() << ": " << ex.what() << std::endl;
throw Exception();
}
}
}

void File::parse_assignment(Tokenizer& tokenizer, const std::string& variable_name) {
Expand Down Expand Up @@ -257,3 +272,21 @@ void File::parse_subninja(Tokenizer& tokenizer) {

subninjas.emplace_back(text.string());
}

// Collect the outputs (generated build files) and inputs (source .fninja files,
// including parsed includes) for the generator build, recursing into sub-files.
void File::add_generator_build(Text& outputs, Text& inputs) const {
    // Append one word to a Text, space-separated unless it is the first element.
    auto append_word = [](Text& text, const std::string& word) {
        if (!text.empty()) {
            text.emplace_back(Text::ElementType::WHITESPACE, " ");
        }
        text.emplace_back(Text::ElementType::WORD, word);
    };

    append_word(outputs, build_filename.string());
    append_word(inputs, source_filename.string());

    // Included files are also inputs: changing them must re-run the generator.
    for (const auto& included : includes) {
        append_word(inputs, included);
    }

    // Recurse so one generator build covers the whole subninja tree.
    for (const auto& subfile : subfiles) {
        subfile->add_generator_build(outputs, inputs);
    }
}
5 changes: 5 additions & 0 deletions src/File.h
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include <filesystem>
#include <string>
#include <map>
#include <set>
#include <unordered_set>

#include "Build.h"
Expand All @@ -53,6 +54,7 @@ class File {

void process();
[[nodiscard]] bool is_output(const std::string& word) const {return outputs.contains(word);}
// True for the root file of the hierarchy, i.e. one with no parent (`previous`) file.
[[nodiscard]] bool is_top() const {return !previous;}
[[nodiscard]] const Rule* find_rule(const std::string& name) const;
[[nodiscard]] const Variable* find_variable(const std::string& name) const;

Expand All @@ -70,12 +72,15 @@ class File {
void parse_rule(Tokenizer& tokenizer);
void parse_subninja(Tokenizer& tokenizer);

void add_generator_build(Text& outputs, Text& inputs) const;

std::filesystem::path source_filename;
std::filesystem::path build_filename;

const File* previous = nullptr;

std::unordered_set<std::string> outputs;
std::set<std::string> includes;
std::map<std::string, Rule> rules;
std::map<std::string, Pool> pools;
std::vector<Build> builds;
Expand Down
1 change: 1 addition & 0 deletions src/Rule.h
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ class Rule {
public:
Rule() = default;
Rule(std::string name, Tokenizer& tokenizer);
// Construct a rule directly from a name and its bindings instead of parsing;
// used for synthesized rules such as the built-in generator rule.
Rule(std::string name, Bindings bindings): name{std::move(name)}, bindings{std::move(bindings)} {}

void process(const File& file);
void print(std::ostream& stream) const;
Expand Down
2 changes: 2 additions & 0 deletions src/Text.h
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,8 @@ class Text {

Text() = default;
Text(Tokenizer& tokenizer, Tokenizer::TokenType terminator);
// Wrap a plain string as a Text consisting of a single WORD element.
explicit Text(std::string value): Text{std::vector<Element>{Element{ElementType::WORD, std::move(value)}}} {}
// Construct a Text directly from a pre-built element list.
explicit Text(std::vector<Element> elements): elements{std::move(elements)} {}

void append(const Text& other) {elements.insert(elements.end(), other.elements.begin(), other.elements.end());}
void emplace_back(ElementType type, std::string value) { elements.emplace_back(type, std::move(value)); }
Expand Down
5 changes: 2 additions & 3 deletions src/Variable.h
Original file line number Diff line number Diff line change
Expand Up @@ -44,16 +44,15 @@ class Variable {
public:
Variable() = default;
Variable(std::string name, bool is_list, Tokenizer& tokenizer);
// Construct a variable directly from a name and value instead of parsing from a tokenizer.
Variable(std::string name, bool is_list, Text value): name{std::move(name)}, is_list{is_list}, value{std::move(value)} {}

void process(const File& file);
void print_definition(std::ostream& stream) const;
void print_use(std::ostream& stream) const;

std::string name;
bool is_list = false;
Text value;

private:
std::string name;
};


Expand Down
6 changes: 6 additions & 0 deletions tests/rule.test
Original file line number Diff line number Diff line change
Expand Up @@ -13,4 +13,10 @@ build_directory = .
rule a
command = a $in $out
flags = --verbose

rule fast-ninja
command = fast-ninja $top_source_directory
generator = 1

build ./build.ninja : fast-ninja ../build.fninja
end-of-inline-data

0 comments on commit 988c034

Please sign in to comment.