Implement pool and include, deterministic output.
dillof committed Dec 27, 2023
1 parent 2fc394c commit f04afe4
Showing 8 changed files with 160 additions and 44 deletions.
4 changes: 2 additions & 2 deletions src/Bindings.h
@@ -32,7 +32,7 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#ifndef BINDINGS_H
#define BINDINGS_H

#include <unordered_map>
#include <map>

#include "Variable.h"

@@ -57,7 +57,7 @@ class Bindings {
auto find(const std::string& name) const {return variables.find(name);}

private:
std::unordered_map<std::string, Variable> variables;
std::map<std::string, Variable> variables;
};

#endif // BINDINGS_H
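
Switching these containers from std::unordered_map to std::map is what makes the generated output deterministic: std::map iterates its keys in sorted order, whereas std::unordered_map's iteration order depends on hashing and can differ between runs and standard-library implementations. A minimal sketch of the difference (the names below are illustrative, not taken from this repository):

#include <iostream>
#include <map>
#include <string>
#include <unordered_map>

int main() {
    // Hypothetical rule names; any set of keys shows the same effect.
    std::unordered_map<std::string, int> unordered{{"cxx", 1}, {"link", 2}, {"ar", 3}};
    std::map<std::string, int> ordered(unordered.begin(), unordered.end());

    for (const auto& [name, value] : unordered) {
        std::cout << name << ' ';   // order depends on the hash table; may vary across implementations
    }
    std::cout << '\n';

    for (const auto& [name, value] : ordered) {
        std::cout << name << ' ';   // always prints: ar cxx link
    }
    std::cout << '\n';
}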
1 change: 1 addition & 0 deletions src/CMakeLists.txt
@@ -5,6 +5,7 @@ ADD_EXECUTABLE(fast-ninja
Bindings.cc
Build.cc
File.cc
Pool.cc
Rule.cc
ScopedDirective.cc
Text.cc
92 changes: 53 additions & 39 deletions src/File.cc
@@ -149,60 +149,70 @@ void File::create_output() const {
}

void File::parse(const std::filesystem::path& filename) {
auto tokenizer = Tokenizer(filename);

try {
while (const auto token = tokenizer.next()) {
switch (token.type) {
case Tokenizer::TokenType::NEWLINE:
case Tokenizer::TokenType::SPACE:
case Tokenizer::TokenType::END:
break;
auto tokenizers = std::vector<Tokenizer>{};
tokenizers.emplace_back(filename);

while (!tokenizers.empty()) {
auto& tokenizer = tokenizers.back();

try {
const auto token = tokenizer.next();

case Tokenizer::TokenType::BUILD:
parse_build(tokenizer);
switch (token.type) {
case Tokenizer::TokenType::END:
tokenizers.pop_back();
break;

case Tokenizer::TokenType::DEFAULT:
parse_default(tokenizer);
case Tokenizer::TokenType::NEWLINE:
case Tokenizer::TokenType::SPACE:
break;

case Tokenizer::TokenType::BUILD:
parse_build(tokenizer);
break;

case Tokenizer::TokenType::INCLUDE:
// TODO
case Tokenizer::TokenType::DEFAULT:
parse_default(tokenizer);
break;

case Tokenizer::TokenType::POOL:
parse_pool(tokenizer);
case Tokenizer::TokenType::INCLUDE: {
auto name = tokenizer.expect(Tokenizer::TokenType::WORD, Tokenizer::Skip::SPACE);
tokenizers.emplace_back(name.string());
break;
}

case Tokenizer::TokenType::POOL:
parse_pool(tokenizer);
break;

case Tokenizer::TokenType::RULE:
parse_rule(tokenizer);
case Tokenizer::TokenType::RULE:
parse_rule(tokenizer);
break;

case Tokenizer::TokenType::SUBNINJA:
parse_subninja(tokenizer);
case Tokenizer::TokenType::SUBNINJA:
parse_subninja(tokenizer);
break;

case Tokenizer::TokenType::WORD:
parse_assignment(tokenizer, token.value);
case Tokenizer::TokenType::WORD:
parse_assignment(tokenizer, token.value);
break;

case Tokenizer::TokenType::ASSIGN:
case Tokenizer::TokenType::ASSIGN_LIST:
case Tokenizer::TokenType::BEGIN_SCOPE:
case Tokenizer::TokenType::COLON:
case Tokenizer::TokenType::END_SCOPE:
case Tokenizer::TokenType::IMPLICIT_DEPENDENCY:
case Tokenizer::TokenType::ORDER_DEPENDENCY:
case Tokenizer::TokenType::VALIDATION_DEPENDENCY:
case Tokenizer::TokenType::VARIABLE_REFERENCE:
throw Exception("invalid token");
case Tokenizer::TokenType::ASSIGN:
case Tokenizer::TokenType::ASSIGN_LIST:
case Tokenizer::TokenType::BEGIN_SCOPE:
case Tokenizer::TokenType::COLON:
case Tokenizer::TokenType::END_SCOPE:
case Tokenizer::TokenType::IMPLICIT_DEPENDENCY:
case Tokenizer::TokenType::ORDER_DEPENDENCY:
case Tokenizer::TokenType::VALIDATION_DEPENDENCY:
case Tokenizer::TokenType::VARIABLE_REFERENCE:
throw Exception("invalid token");
}
} catch (Exception& ex) {
std::cerr << tokenizer.file_name() << ":" << tokenizer.current_line_number() << ": " << ex.what() << std::endl;
throw Exception();
}
}
} catch (Exception& ex) {
std::cerr << filename.string() << ":" << tokenizer.current_line_number() << ": " << ex.what() << std::endl;
throw Exception();
}
}

void File::parse_assignment(Tokenizer& tokenizer, const std::string& variable_name) {
@@ -225,8 +235,12 @@ void File::parse_build(Tokenizer& tokenizer) { builds.emplace_back(tokenizer); }
void File::parse_default(Tokenizer& tokenizer) { defaults.append(Text(tokenizer, Tokenizer::TokenType::NEWLINE)); }

void File::parse_pool(Tokenizer& tokenizer) {
throw Exception("pool not implemented yet");
// TODO: implement
tokenizer.skip_space();
const auto token = tokenizer.next();
if (token.type != Tokenizer::TokenType::WORD) {
throw Exception("name expected");
}
pools[token.string()] = Pool(token.string(), tokenizer);
}

void File::parse_rule(Tokenizer& tokenizer) {
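
The reworked File::parse() now drives a stack of tokenizers: an include pushes a new tokenizer, END pops one, so an included file is read to completion before the including file resumes, and errors are reported against the file currently on top of the stack. A self-contained sketch of the same pattern, using plain std::ifstream and a hypothetical file name rather than the project's Tokenizer:

#include <fstream>
#include <iostream>
#include <sstream>
#include <string>
#include <vector>

// Process a file, descending into "include <name>" directives depth-first.
void process(const std::string& top_file) {
    std::vector<std::ifstream> streams;
    streams.emplace_back(top_file);

    while (!streams.empty()) {
        std::string line;
        if (!std::getline(streams.back(), line)) {
            streams.pop_back();          // end of this file: resume the including file
            continue;
        }

        std::istringstream words(line);
        std::string keyword;
        words >> keyword;
        if (keyword == "include") {
            std::string name;
            words >> name;
            streams.emplace_back(name);  // read the included file before continuing here
        } else {
            std::cout << line << '\n';   // stand-in for the real directive handling
        }
    }
}

int main() { process("build.fninja"); }  // hypothetical top-level file name

Unlike this sketch, the real code reports problems through the Tokenizer, which now records its file name so the catch block can print tokenizer.file_name() and current_line_number().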
6 changes: 4 additions & 2 deletions src/File.h
@@ -35,10 +35,11 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "Bindings.h"
#include <filesystem>
#include <string>
#include <unordered_map>
#include <map>
#include <unordered_set>

#include "Build.h"
#include "Pool.h"
#include "Rule.h"
#include "Variable.h"

@@ -75,7 +76,8 @@ class File {
const File* previous = nullptr;

std::unordered_set<std::string> outputs;
std::unordered_map<std::string, Rule> rules;
std::map<std::string, Rule> rules;
std::map<std::string, Pool> pools;
std::vector<Build> builds;
Bindings bindings;
Text defaults;
44 changes: 44 additions & 0 deletions src/Pool.cc
@@ -0,0 +1,44 @@
/*
Pool.cc --
Copyright (C) Dieter Baron
The authors can be contacted at <[email protected]>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. The names of the authors may not be used to endorse or promote
products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS "AS IS" AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#include "Pool.h"

Pool::Pool(std::string name, Tokenizer& tokenizer) : name{ std::move(name) } {
tokenizer.expect(Tokenizer::TokenType::NEWLINE, Tokenizer::Skip::SPACE);
bindings = Bindings{tokenizer};
}

void Pool::process(const File& file) { bindings.process(file); }

void Pool::print(std::ostream& stream) const {
stream << std::endl << "pool " << name << std::endl;
bindings.print(stream, " ");
}
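
For reference, Pool::print emits a blank line, then "pool <name>", then the pool's bindings indented by four spaces. Assuming Bindings::print writes one "key = value" line per binding with the given prefix (that code is not part of this diff), the formatting amounts to roughly the following:

#include <iostream>
#include <map>
#include <string>

int main() {
    // Hypothetical pool with a ninja-style "depth" binding; the real Bindings type is richer.
    std::string name = "link";
    std::map<std::string, std::string> bindings{{"depth", "4"}};

    std::cout << std::endl << "pool " << name << std::endl;
    for (const auto& [key, value] : bindings) {
        std::cout << "    " << key << " = " << value << std::endl;
    }
    // Expected output (after the leading blank line):
    // pool link
    //     depth = 4
}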
53 changes: 53 additions & 0 deletions src/Pool.h
@@ -0,0 +1,53 @@
/*
Pool.h --
Copyright (C) Dieter Baron
The authors can be contacted at <[email protected]>
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. The names of the authors may not be used to endorse or promote
products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHORS "AS IS" AND ANY EXPRESS
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

#ifndef POOL_H
#define POOL_H

#include <string>

#include "Bindings.h"
#include "Variable.h"

class Pool {
public:
Pool() = default;
Pool(std::string name, Tokenizer& tokenizer);

void process(const File& file);
void print(std::ostream& stream) const;

private:
std::string name;
Bindings bindings;
};

#endif //POOL_H
2 changes: 1 addition & 1 deletion src/Tokenizer.cc
@@ -61,7 +61,7 @@ std::unordered_map<int, Tokenizer::CharacterType> Tokenizer::special_characters
};
// clang-format on

Tokenizer::Tokenizer(const std::filesystem::path& filename): source{filename} {
Tokenizer::Tokenizer(const std::filesystem::path& filename): filename{filename}, source{filename} {
if (source.fail()) {
throw Exception("can't open '%s'", filename.c_str());
}
2 changes: 2 additions & 0 deletions src/Tokenizer.h
@@ -101,6 +101,7 @@ class Tokenizer {
void skip_whitespace();
void unget(const Token& token);
[[nodiscard]] int current_line_number() const {return line_number;}
[[nodiscard]] const std::string& file_name() const {return filename;}

private:
static CharacterType type(int c);
@@ -120,6 +121,7 @@
static std::unordered_map<std::string, TokenType> keywords;
static std::unordered_map<int, CharacterType> special_characters;

std::string filename;
std::ifstream source;
std::optional<Token> ungot;
bool beggining_of_line = true;
