-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
0 parents
commit d152039
Showing
7 changed files
with
285 additions
and
0 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,23 @@ | ||
name: test

on:
  push:
    branches:
      - master
      - main
  pull_request:

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: erlef/setup-beam@v1
        with:
          otp-version: "26.0.2"
          gleam-version: "1.0.0"
          rebar3-version: "3"
          # elixir-version: "1.15.4"
      - run: gleam deps download
      - run: gleam test
      - run: gleam format --check src test
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
*.beam
*.ez
/build
erl_crash.dump
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,25 @@ | ||
# glex

[![Package Version](https://img.shields.io/hexpm/v/glex)](https://hex.pm/packages/glex)
[![Hex Docs](https://img.shields.io/badge/hex-docs-ffaff3)](https://hexdocs.pm/glex/)

```sh
gleam add glex
```
```gleam
import glex

pub fn main() {
  // TODO: An example of the project in use
}
```

Further documentation can be found at <https://hexdocs.pm/glex>.

## Development

```sh
gleam run   # Run the project
gleam test  # Run the tests
gleam shell # Run an Erlang shell
```
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,19 @@ | ||
name = "glex"
version = "1.0.0"

# Fill out these fields if you intend to generate HTML documentation or publish
# your project to the Hex package manager.
#
# description = ""
# licences = ["Apache-2.0"]
# repository = { type = "github", user = "username", repo = "project" }
# links = [{ title = "Website", href = "https://gleam.run" }]
#
# For a full reference of all the available options, you can have a look at
# https://gleam.run/writing-gleam/gleam-toml/.

[dependencies]
gleam_stdlib = "~> 0.34 or ~> 1.0"

[dev-dependencies]
gleeunit = "~> 1.0"
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,11 @@ | ||
# This file was generated by Gleam
# You typically do not need to edit this file

packages = [
  { name = "gleam_stdlib", version = "0.36.0", build_tools = ["gleam"], requirements = [], otp_app = "gleam_stdlib", source = "hex", outer_checksum = "C0D14D807FEC6F8A08A7C9EF8DFDE6AE5C10E40E21325B2B29365965D82EB3D4" },
  { name = "gleeunit", version = "1.1.2", build_tools = ["gleam"], requirements = ["gleam_stdlib"], otp_app = "gleeunit", source = "hex", outer_checksum = "72CDC3D3F719478F26C4E2C5FED3E657AC81EC14A47D2D2DEBB8693CA3220C3B" },
]

[requirements]
gleam_stdlib = { version = "~> 0.34 or ~> 1.0" }
gleeunit = { version = "~> 1.0" }
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,191 @@ | ||
import gleam/string
import gleam/list
import gleam/bit_array
import gleam/regex.{type Options, type Regex, Options}
import gleam/iterator.{type Iterator}
|
||
/// A single lexed token. `value` is always the exact text consumed from
/// the source.
pub type Token {
  /// Matched by one of the lexer's token rules (see `add_rule`).
  Valid(name: String, value: String)
  /// Matched by one of the lexer's ignore rules (see `add_ignore`),
  /// e.g. whitespace or comments.
  Ignored(name: String, value: String)
  /// A single grapheme that no rule matched.
  Invalid(value: String)
  /// Produced once the source has been fully consumed; `lex` and
  /// `iterator` stop before emitting it.
  EndOfFile
}
|
||
/// Offset of a token's first character from the start of the input.
///
/// NOTE(review): `next` advances by grapheme count for Valid/Ignored
/// tokens (`string.length`) but by UTF-8 byte count for Invalid ones
/// (`byte_size`), so the unit is inconsistent for multi-byte input —
/// confirm the intended semantics.
pub type Position {
  Position(Int)
}
|
||
/// A named lexing rule. `pattern` is the (possibly `^`-anchored) regex
/// source and `regex` its compiled form; see `new_rule` for the
/// anchoring behaviour.
pub opaque type Rule {
  Rule(name: String, pattern: String, regex: Regex)
}
|
||
/// Compile a named rule from a regex pattern.
///
/// The pattern is anchored with a leading `^` (added only if not
/// already present) so the rule can only match at the current lexing
/// position, never further into the source.
///
/// Panics (via `let assert`) if `pattern` is not a valid regex.
pub fn new_rule(name: String, pattern: String) -> Rule {
  let options = Options(case_insensitive: False, multi_line: False)
  // Anchor the pattern so `regex.scan` only matches at the start of
  // the remaining source.
  let pattern = case pattern {
    "^" <> _rest -> pattern
    _ -> "^" <> pattern
  }
  let assert Ok(regex) = regex.compile(pattern, options)
  Rule(name, pattern, regex)
}
|
||
/// Lexer state: the token rules, the ignore rules, the remaining
/// (not yet consumed) source text, and the current position within the
/// original input.
pub opaque type Lexer {
  Lexer(rules: List(Rule), ignore: List(Rule), source: String, position: Int)
}
|
||
/// Create an empty lexer: no rules, no ignore rules, no source.
pub fn new() -> Lexer {
  Lexer(rules: [], ignore: [], source: "", position: 0)
}
|
||
/// Append a token rule compiled from `pattern` (see `new_rule`).
/// Rules are tried in the order they were added; the first match wins.
pub fn add_rule(lexer: Lexer, name: String, pattern: String) -> Lexer {
  // `list.append` + record-update syntax replaces the previous
  // `list.concat`-and-rebuild-every-field construction; behaviour is
  // identical (the new rule still goes to the end of the list).
  Lexer(..lexer, rules: list.append(lexer.rules, [new_rule(name, pattern)]))
}
|
||
/// Append an ignore rule compiled from `pattern` (see `new_rule`).
/// Matches become `Ignored` tokens; ignore rules are tried before
/// token rules in `next`.
pub fn add_ignore(lexer: Lexer, name: String, pattern: String) -> Lexer {
  // Same idiom fix as `add_rule`: `list.append` + record update
  // instead of `list.concat` and rebuilding every field.
  Lexer(..lexer, ignore: list.append(lexer.ignore, [new_rule(name, pattern)]))
}
|
||
/// Attach `source` to the lexer, resetting the position to the start.
pub fn build(lexer: Lexer, source: String) -> Lexer {
  Lexer(..lexer, source: source, position: 0)
}
|
||
/// Eagerly lex the whole source, returning every token paired with the
/// position at which it started.
pub fn lex(lexer: Lexer) -> List(#(Token, Position)) {
  lexer
  |> iterator
  |> iterator.to_list
}
|
||
/// Lazily lex the source, yielding one `#(Token, Position)` pair per
/// step. Iteration stops when `next` reports `EndOfFile`; the
/// `EndOfFile` token itself is never emitted.
pub fn iterator(lexer: Lexer) -> Iterator(#(Token, Position)) {
  // Unfold threads the advanced lexer through as the accumulator.
  use lexer <- iterator.unfold(from: lexer)

  case next(lexer) {
    #(_lexer, #(EndOfFile, _position)) -> iterator.Done
    #(lexer, wraped_token) ->
      iterator.Next(element: wraped_token, accumulator: lexer)
  }
}
|
||
/// Produce the next token and the lexer state advanced past it.
///
/// Ignore rules are tried first (so e.g. whitespace is consumed before
/// token rules get a chance), then token rules. If nothing matches,
/// a single grapheme is emitted as `Invalid` so lexing can continue.
/// On empty source, returns `EndOfFile` at the current position.
///
/// Fix: the Invalid branch previously advanced the position by the
/// grapheme's UTF-8 *byte* size while matched tokens advanced by
/// *grapheme* count, so positions drifted after a multi-byte invalid
/// character. All branches now advance by grapheme count.
pub fn next(lexer: Lexer) -> #(Lexer, #(Token, Position)) {
  case lex_rules(lexer.ignore, lexer.source) {
    Ok(#(name, value)) -> consume(lexer, value, Ignored(name, value))
    Error(Nil) ->
      case lex_rules(lexer.rules, lexer.source) {
        Ok(#(name, value)) -> consume(lexer, value, Valid(name, value))
        Error(Nil) ->
          case string.pop_grapheme(lexer.source) {
            // Source exhausted: report EndOfFile at the final position.
            Error(Nil) -> #(lexer, #(EndOfFile, Position(lexer.position)))
            // No rule matched: emit the offending grapheme as Invalid
            // and skip past it.
            Ok(#(grapheme, rest)) -> #(
              advance(lexer, rest, string.length(grapheme)),
              wrap_token(lexer, Invalid(grapheme)),
            )
          }
      }
  }
}

// Drop `value` from the front of the source and pair `token` with the
// position where it started. Shared by the Ignored and Valid branches,
// which previously duplicated this logic.
fn consume(
  lexer: Lexer,
  value: String,
  token: Token,
) -> #(Lexer, #(Token, Position)) {
  let length = string.length(value)
  let rest = string.drop_left(lexer.source, length)
  #(advance(lexer, rest, length), wrap_token(lexer, token))
}
|
||
/// True when the token stream contains no `Invalid` tokens.
pub fn ok(tokens: List(#(Token, Position))) -> Bool {
  let is_invalid = fn(pair: #(Token, Position)) {
    case pair.0 {
      Invalid(_) -> True
      _ -> False
    }
  }
  !list.any(tokens, is_invalid)
}
|
||
/// Keep only the `Valid` tokens (and their positions), in order.
pub fn valid_only(tokens: List(#(Token, Position))) -> List(#(Token, Position)) {
  list.filter(tokens, fn(pair) {
    case pair.0 {
      Valid(_, _) -> True
      _ -> False
    }
  })
}
|
||
/// Keep only the `Ignored` tokens (and their positions), in order.
pub fn ignored_only(
  tokens: List(#(Token, Position)),
) -> List(#(Token, Position)) {
  list.filter(tokens, fn(pair) {
    case pair.0 {
      Ignored(_, _) -> True
      _ -> False
    }
  })
}
|
||
/// Keep only the `Invalid` tokens (and their positions), in order.
pub fn invalid_only(
  tokens: List(#(Token, Position)),
) -> List(#(Token, Position)) {
  list.filter(tokens, fn(pair) {
    case pair.0 {
      Invalid(_) -> True
      _ -> False
    }
  })
}
|
||
// Replace the remaining source with `source` and move the position
// forward by `offset`, keeping both rule sets unchanged.
fn advance(lexer: Lexer, source: String, offset: Int) -> Lexer {
  Lexer(..lexer, source: source, position: lexer.position + offset)
}
|
||
// Pair `token` with the position at which it started.
fn wrap_token(lexer: Lexer, token: Token) -> #(Token, Position) {
  let position = Position(lexer.position)
  #(token, position)
}
|
||
// Size of `string` in UTF-8 bytes (not graphemes).
fn byte_size(string: String) -> Int {
  string
  |> bit_array.from_string
  |> bit_array.byte_size
}
|
||
// Return the first rule (in declaration order) whose anchored regex
// matches at the start of `source`, together with the matched text.
//
// Fix: the previous `list.flat_map` + `list.first` ran every rule's
// regex against the source even after an earlier rule had already
// matched; `list.find_map` short-circuits at the first match with
// identical first-match-wins semantics.
fn lex_rules(
  rules: List(Rule),
  source: String,
) -> Result(#(String, String), Nil) {
  list.find_map(rules, fn(rule) {
    case regex.scan(rule.regex, source) {
      [match, ..] -> Ok(#(rule.name, match.content))
      [] -> Error(Nil)
    }
  })
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,12 @@ | ||
import gleeunit
import gleeunit/should
|
||
/// Test-runner entry point: gleeunit discovers and runs every public
/// function whose name ends in `_test`.
pub fn main() {
  gleeunit.main()
}
|
||
// gleeunit test functions end in `_test` | ||
// Placeholder test proving the gleeunit harness runs.
pub fn hello_world_test() {
  should.equal(1, 1)
}