Skip to content

Commit

Permalink
Remove from_str and From impls from types
Browse files Browse the repository at this point in the history
- Due to assumptions made about the contents of a type (for instance,
  a start tag must end in '>'), the ability to construct token and
  property types directly is removed. This also leaves the door open
  for further changes to the implementation.
- Remove as_str(), as_bytes(), as_str_unchecked(), to_str(), and
  into_inner() from the Attribute and Attributes types. Both are now
  opaque types.
  • Loading branch information
bluk committed Nov 14, 2023
1 parent 6ec9171 commit e62e544
Show file tree
Hide file tree
Showing 5 changed files with 265 additions and 174 deletions.
35 changes: 29 additions & 6 deletions maybe_xml/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,15 +15,29 @@
//! let mut pos = 0;
//!
//! let token = reader.tokenize(&mut pos);
//! assert_eq!(Some(Ty::StartTag(StartTag::from_str("<id>"))), token.map(|t| t.ty()));
//! if let Some(Ty::StartTag(tag)) = token.map(|t| t.ty()) {
//! assert_eq!("id", tag.name().local().as_str());
//! assert_eq!(None, tag.name().namespace_prefix());
//! } else {
//! panic!();
//! }
//! assert_eq!(4, pos);
//!
//! let token = reader.tokenize(&mut pos);
//! assert_eq!(Some(Ty::Characters(Characters::from_str("123"))), token.map(|t| t.ty()));
//! if let Some(Ty::Characters(chars)) = token.map(|t| t.ty()) {
//! assert_eq!("123", chars.content().as_str());
//! } else {
//! panic!();
//! }
//! assert_eq!(7, pos);
//!
//! let token = reader.tokenize(&mut pos);
//! assert_eq!(Some(Ty::EndTag(EndTag::from_str("</id>"))), token.map(|t| t.ty()));
//! if let Some(Ty::EndTag(tag)) = token.map(|t| t.ty()) {
//! assert_eq!("</id>", tag.as_str());
//! assert_eq!("id", tag.name().local().as_str());
//! } else {
//! panic!();
//! }
//! assert_eq!(12, pos);
//!
//! let token = reader.tokenize(&mut pos);
Expand All @@ -45,15 +59,24 @@
//! let mut iter = reader.into_iter().map(|token| token.ty());
//!
//! let token_type = iter.next();
//! assert_eq!(Some(Ty::StartTag(StartTag::from_str("<id>"))), token_type);
//! match token_type {
//! Some(Ty::StartTag(start_tag)) => {
//! assert_eq!(start_tag.name().as_str(), "id");
//! }
//! _ => panic!("unexpected token"),
//! }
//! assert_eq!(Some(Ty::Characters(Characters::from_str("Example"))), iter.next());
//! assert_eq!(Some(Ty::EndTag(EndTag::from_str("</id>"))), iter.next());
//! if let Some(Ty::Characters(chars)) = iter.next() {
//! assert_eq!("Example", chars.content().as_str());
//! } else {
//! panic!();
//! }
//!
//! if let Some(Ty::EndTag(tag)) = iter.next() {
//! assert_eq!("</id>", tag.as_str());
//! assert_eq!("id", tag.name().local().as_str());
//! } else {
//! panic!();
//! }
//! assert_eq!(None, iter.next());
//! ```
//!
Expand Down
66 changes: 52 additions & 14 deletions maybe_xml/src/read.rs
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,29 @@ use scanner::scan;
/// let mut pos = 0;
///
/// let token = reader.tokenize(&mut pos);
/// assert_eq!(Some(Ty::StartTag(StartTag::from_str("<id>"))), token.map(|t| t.ty()));
/// if let Some(Ty::StartTag(tag)) = token.map(|t| t.ty()) {
/// assert_eq!("id", tag.name().local().as_str());
/// assert_eq!(None, tag.name().namespace_prefix());
/// } else {
/// panic!();
/// }
/// assert_eq!(4, pos);
///
/// let token = reader.tokenize(&mut pos);
/// assert_eq!(Some(Ty::Characters(Characters::from_str("123"))), token.map(|t| t.ty()));
/// if let Some(Ty::Characters(chars)) = token.map(|t| t.ty()) {
/// assert_eq!("123", chars.content().as_str());
/// } else {
/// panic!();
/// }
/// assert_eq!(7, pos);
///
/// let token = reader.tokenize(&mut pos);
/// assert_eq!(Some(Ty::EndTag(EndTag::from_str("</id>"))), token.map(|t| t.ty()));
/// if let Some(Ty::EndTag(tag)) = token.map(|t| t.ty()) {
/// assert_eq!("</id>", tag.as_str());
/// assert_eq!("id", tag.name().local().as_str());
/// } else {
/// panic!();
/// }
/// assert_eq!(12, pos);
///
/// let token = reader.tokenize(&mut pos);
Expand Down Expand Up @@ -94,14 +108,23 @@ impl<'a> Reader<'a> {
/// let reader = unsafe { Reader::from_slice_unchecked(&buf) };
/// let mut pos = 0;
///
/// let ty = reader.tokenize(&mut pos).map(|token| token.ty());
/// assert_eq!(Some(Ty::StartTag(StartTag::from_str("<id>"))), ty);
/// let token = reader.tokenize(&mut pos);
/// if let Some(Ty::StartTag(tag)) = token.map(|t| t.ty()) {
/// assert_eq!("id", tag.name().local().as_str());
/// assert_eq!(None, tag.name().namespace_prefix());
/// } else {
/// panic!();
/// }
///
/// // Position was assigned to the index after the end of the token
/// assert_eq!(4, pos);
///
/// let ty = reader.tokenize(&mut pos).map(|token| token.ty());
/// assert_eq!(Some(Ty::Characters(Characters::from_str("123"))), ty);
/// let token = reader.tokenize(&mut pos);
/// if let Some(Ty::Characters(chars)) = token.map(|t| t.ty()) {
/// assert_eq!("123", chars.content().as_str());
/// } else {
/// panic!();
/// }
///
/// // Position was assigned to the index after the end of the token
/// assert_eq!(7, pos);
Expand All @@ -120,8 +143,13 @@ impl<'a> Reader<'a> {
/// // Start tokenizing again with the input
/// let reader = unsafe { Reader::from_slice_unchecked(&buf) };
///
/// let ty = reader.tokenize(&mut pos).map(|token| token.ty());
/// assert_eq!(Some(Ty::EndTag(EndTag::from_str("</id>"))), ty);
/// let token = reader.tokenize(&mut pos);
/// if let Some(Ty::EndTag(tag)) = token.map(|t| t.ty()) {
/// assert_eq!("</id>", tag.as_str());
/// assert_eq!("id", tag.name().local().as_str());
/// } else {
/// panic!();
/// }
///
/// // Position was assigned to the index after the end of the token
/// assert_eq!(5, pos);
Expand Down Expand Up @@ -187,7 +215,12 @@ impl<'a> Reader<'a> {
/// let mut pos = 0;
///
/// let token = reader.tokenize(&mut pos);
/// assert_eq!(Some(Ty::StartTag(StartTag::from_str("<id>"))), token.map(|t| t.ty()));
/// if let Some(Ty::StartTag(tag)) = token.map(|t| t.ty()) {
/// assert_eq!("id", tag.name().local().as_str());
/// assert_eq!(None, tag.name().namespace_prefix());
/// } else {
/// panic!();
/// }
///
/// // Position was assigned to the index after the end of the token
/// assert_eq!(4, pos);
Expand Down Expand Up @@ -250,7 +283,12 @@ impl<'a> Reader<'a> {
/// let mut pos = 0;
///
/// let token = reader.parse(pos);
/// assert_eq!(Some(Ty::StartTag(StartTag::from_str("<id>"))), token.map(|t| t.ty()));
/// if let Some(Ty::StartTag(tag)) = token.map(|t| t.ty()) {
/// assert_eq!("id", tag.name().local().as_str());
/// assert_eq!(None, tag.name().namespace_prefix());
/// } else {
/// panic!();
/// }
///
/// pos += token.map(|t| t.len()).unwrap_or_default();
/// assert_eq!(4, pos);
Expand Down Expand Up @@ -518,17 +556,17 @@ mod tests {
let mut pos = 0;
buf.extend("Hello".as_bytes());
let reader = unsafe { Reader::from_slice_unchecked(&buf) };
assert_eq!(Some(Token::from_str("Hello")), reader.tokenize(&mut pos));
assert_eq!(Some("Hello"), reader.tokenize(&mut pos).map(|t| t.as_str()));
assert_eq!(buf.len(), pos);

buf.extend("wo".as_bytes());
let reader = unsafe { Reader::from_slice_unchecked(&buf) };
assert_eq!(Some(Token::from_str("wo")), reader.tokenize(&mut pos));
assert_eq!(Some("wo"), reader.tokenize(&mut pos).map(|t| t.as_str()));
assert_eq!(buf.len(), pos);

buf.extend("rld!<".as_bytes());
let reader = unsafe { Reader::from_slice_unchecked(&buf) };
assert_eq!(Some(Token::from_str("rld!")), reader.tokenize(&mut pos));
assert_eq!(Some("rld!"), reader.tokenize(&mut pos).map(|t| t.as_str()));
assert_eq!(buf.len() - 1, pos);
}

Expand Down
18 changes: 3 additions & 15 deletions maybe_xml/src/token.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ impl<'a> Token<'a> {
/// Instantiates a new instance with a string.
#[inline]
#[must_use]
pub const fn from_str(input: &'a str) -> Self {
pub(crate) const fn from_str(input: &'a str) -> Self {
Self(input)
}

Expand Down Expand Up @@ -135,19 +135,13 @@ impl<'a> fmt::Display for Token<'a> {
}
}

impl<'a> From<&'a str> for Token<'a> {
#[inline]
fn from(value: &'a str) -> Self {
Self(value)
}
}

macro_rules! converters {
($name:ident) => {
impl<'a> $name<'a> {
/// Instantiates a new view with the given string.
#[inline]
#[must_use]
#[cfg(test)]
pub const fn from_str(value: &'a str) -> Self {
Self(value)
}
Expand Down Expand Up @@ -212,15 +206,9 @@ macro_rules! converters {
f.write_str(self.0)
}
}

impl<'a> From<&'a str> for $name<'a> {
#[inline]
fn from(value: &'a str) -> Self {
Self(value)
}
}
};
}

pub(crate) use converters;

/// Type of token
Expand Down
Loading

0 comments on commit e62e544

Please sign in to comment.