Skip to content

Commit ef823e8

Browse files
authored
Merge pull request #14 from spyoungtech/from_bytes
add from_bytes to deserializer
2 parents 561e40d + cb7626e commit ef823e8

File tree

5 files changed

+52
-17
lines changed

5 files changed

+52
-17
lines changed

examples/json5-trailing-comma-formatter/src/main.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ use json_five::rt::parser::{from_str, ArrayValueContext, JSONValue, KeyValuePair
22

33
fn format_value(val: &mut JSONValue) {
44
match val {
5-
JSONValue::JSONObject { key_value_pairs, context } => {
5+
JSONValue::JSONObject { key_value_pairs, .. } => {
66
let length = key_value_pairs.len();
77
for (idx, kvp) in key_value_pairs.iter_mut().enumerate() {
88
match kvp.value {

src/de.rs

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use serde::de::{self, DeserializeSeed, Deserialize, Deserializer, MapAccess, SeqAccess, VariantAccess, Visitor};
22
use std::fmt;
3-
use crate::parser::{JSONValue, JSONKeyValuePair, UnaryOperator, from_str as model_from_str};
3+
use crate::parser::{JSONValue, JSONKeyValuePair, UnaryOperator, from_str as model_from_str, from_bytes as model_from_bytes};
44
use crate::utils::unescape;
55
#[derive(Debug)]
66
pub enum SerdeJSON5Error {
@@ -579,6 +579,22 @@ where
579579
T::deserialize(deserializer)
580580
}
581581

582+
pub fn from_bytes<'de, T>(s: &'de [u8]) -> Result<T, SerdeJSON5Error>
583+
where
584+
T: Deserialize<'de>,
585+
{
586+
// 1) Parse the string into your JSONText
587+
let parsed = model_from_bytes(s).map_err(|err| SerdeJSON5Error::Custom(err.to_string()))?;
588+
589+
// 2) Wrap the JSONValue in our deserializer
590+
let deserializer = JSONValueDeserializer {
591+
input: &parsed.value,
592+
};
593+
594+
// 3) Deserialize into the caller’s type T
595+
T::deserialize(deserializer)
596+
}
597+
582598
#[cfg(test)]
583599
mod test {
584600
use std::collections::HashMap;

src/lib.rs

Lines changed: 7 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
#![doc = include_str!("../README.md")]
2+
13
/// The default performance-focused tokenizer
24
pub mod tokenize;
35

@@ -16,35 +18,26 @@ pub mod de;
1618
#[cfg(feature = "serde")]
1719
pub mod ser;
1820

19-
/// The `serde` deserializer
2021
#[cfg(feature = "serde")]
21-
pub use de::{from_str, JSONValueDeserializer};
22+
pub use de::{from_str, from_bytes, JSONValueDeserializer};
2223

23-
/// the `serde` serializer
2424
#[cfg(feature = "serde")]
2525
pub use ser::{to_string, to_string_formatted, Serializer};
2626

27-
/// turn your strings into abstract JSON5 model (AST)
28-
pub use parser::from_str as model_from_str;
2927

30-
/// Turn [crate::tokenize::Tokens] into AST
28+
pub use parser::{from_str as model_from_str, from_bytes as model_from_bytes};
29+
3130
pub use parser::from_tokens as model_from_tokens;
3231

33-
/// formatting configuration for use with [crate::to_string_formatted]
3432
pub use parser::{FormatConfiguration, TrailingComma};
3533

36-
/// turn str/bytes into [crate::tokenize::Tokens]
37-
pub use tokenize::{tokenize_bytes, tokenize_str};
34+
pub use tokenize::{tokenize_bytes, tokenize_str, tokenize_rt_str, tokenize_rt_bytes};
3835

39-
/// round-trip source (str) into [crate::rt::tokenize::Token]s
4036
pub use rt::tokenize::{tokens_to_source, source_to_tokens};
4137

38+
4239
/// The round-tripping module
4340
pub mod rt {
4441
pub mod parser;
4542
pub mod tokenize;
4643
}
47-
48-
#[doc = include_str!("../README.md")]
49-
#[cfg(doctest)]
50-
pub struct ReadmeDoctests;

src/parser.rs

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -580,11 +580,34 @@ pub fn from_str<'input>(source: &'input str) -> Result<JSONText<'input>, Parsing
580580
}
581581
}
582582

583+
/// Like [from_str] but for bytes
584+
pub fn from_bytes<'input>(source: &'input [u8]) -> Result<JSONText<'input>, ParsingError> {
585+
use crate::tokenize::tokenize_bytes;
586+
let maybe_toks = tokenize_bytes(source);
587+
match maybe_toks {
588+
Err(e) => {
589+
Err(ParsingError{index: e.index, message: e.message, char_index: e.char_index, lineno: e.lineno, colno: e.colno})
590+
}
591+
Ok(toks) => {
592+
from_tokens(&toks)
593+
}
594+
}
595+
}
596+
583597
#[cfg(test)]
584598
mod tests {
585599
use crate::tokenize::Tokenizer;
586600
use crate::parser::JSONValue::*;
587601
use super::*;
602+
603+
#[test]
fn test_from_bytes() {
    // An empty JSON5 object supplied as raw bytes should parse to an
    // object model with no key/value pairs.
    let expected = JSONText { value: JSONValue::JSONObject { key_value_pairs: vec![] } };
    assert_eq!(from_bytes(b"{}").unwrap(), expected);
}
610+
588611
#[test]
589612
fn test_foo() {
590613
let res = from_str("{}").unwrap();

src/tokenize.rs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -640,16 +640,18 @@ impl<'input> Iterator for Tokenizer<'input> {
640640
/// Turn str into [Tokens].
641641
///
642642
/// Usually not used directly.
643+
/// Token spans will not include whitespace and comment tokens
643644
pub fn tokenize_str(text: &'_ str) -> Result<Tokens<'_>, TokenizationError> {
644645
Tokenizer::new(text).tokenize()
645646
}
646647

648+
/// Like [tokenize_str] but includes whitespace and comment tokens
647649
pub fn tokenize_rt_str(text: &'_ str) -> Result<Tokens<'_>, TokenizationError> {
648650
let config = TokenizerConfig{include_comments: true, include_whitespace: true, allow_octal: false};
649651
Tokenizer::with_configuration(text, config).tokenize()
650652
}
651653

652-
/// Tokenize bytes to [Tokens]
654+
/// Like [tokenize_str] but for bytes
653655
pub fn tokenize_bytes(bytes: &'_ [u8]) -> Result<Tokens<'_>, TokenizationError> {
654656
let maybe_text = std::str::from_utf8(bytes);
655657
match maybe_text {
@@ -669,6 +671,7 @@ pub fn tokenize_bytes(bytes: &'_ [u8]) -> Result<Tokens<'_>, TokenizationError>
669671
}
670672
}
671673

674+
/// Like [tokenize_rt_str] but for bytes
672675
pub fn tokenize_rt_bytes(bytes: &'_ [u8]) -> Result<Tokens<'_>, TokenizationError> {
673676
let maybe_text = std::str::from_utf8(bytes);
674677
match maybe_text {

0 commit comments

Comments
 (0)