Skip to content

Commit

Permalink
Fix clippy and cargo doc warnings
Browse files Browse the repository at this point in the history
  • Loading branch information
yannham committed Nov 26, 2024
1 parent 4641104 commit e86bc0a
Show file tree
Hide file tree
Showing 8 changed files with 28 additions and 29 deletions.
8 changes: 4 additions & 4 deletions core/src/bytecode/ast/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -357,6 +357,10 @@ impl AstAlloc {
self.generic_arena.alloc_slice_fill_iter(iter)
}

/// Copies the string `s` into the arena, returning a borrow that lives as
/// long as the allocator itself.
pub fn alloc_str<'ast>(&'ast self, s: &str) -> &'ast str {
    let arena = &self.generic_arena;
    arena.alloc_str(s)
}

/// Moves `node` into the arena and hands back a shared reference to the
/// stored value, valid for the allocator's lifetime.
pub fn node<'ast>(&'ast self, node: Node<'ast>) -> &'ast Node<'ast> {
    let stored: &'ast Node<'ast> = self.generic_arena.alloc(node);
    stored
}
Expand All @@ -373,10 +377,6 @@ impl AstAlloc {
Node::String(self.generic_arena.alloc_str(s))
}

pub fn string_move<'ast>(&'ast self, s: &str) -> &'_ str {
self.generic_arena.alloc_str(s)
}

pub fn string_chunks<'ast, I>(&'ast self, chunks: I) -> Node<'ast>
where
I: IntoIterator<Item = StringChunk<Ast<'ast>>>,
Expand Down
2 changes: 1 addition & 1 deletion core/src/bytecode/ast/record.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ pub enum FieldPathElem<'ast> {
/// A statically known identifier.
Ident(LocIdent),
/// A dynamic field name written as a quoted expression, e.g. `"%{protocol}" = .. `. Normally,
/// the expression must be a [crate::bytecode::ast::Node::StringChunk], so we could store the
/// the expression must be a [crate::bytecode::ast::Node::StringChunks], so we could store the
/// chunks directly which would be more precise. However, it's useful to keep a general
/// [crate::bytecode::ast::Ast] to store errors when part of the field path failed to parse
/// correctly.
Expand Down
8 changes: 4 additions & 4 deletions core/src/combine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,10 @@ pub trait Combine: Default {
fn combine(left: Self, right: Self) -> Self;
}

/// [combine::Combine] doesn't work for new AST nodes, which require an external allocator to
/// create new nodes. This trait is a version that takes this additional allocator. It's temporary:
/// I suspect we won't need the original general `Combine` trait once we move to the bytecode vm,
/// as [crate::combine::Combine] is used mostly on ast-like data.
/// [Combine] doesn't work for new AST nodes, which require an external allocator to create new
/// nodes. This trait is a version that takes this additional allocator. It's temporary: I suspect
/// we won't need the original general [Combine] trait once we move to the bytecode VM, as
/// [Combine] is used mostly on ast-like data, and we will rename [CombineAlloc] to [Combine].
pub trait CombineAlloc<'ast> {
/// Combines `left` and `right` into a single value, using `alloc` to
/// allocate any new AST nodes the merged result needs.
fn combine(alloc: &'ast AstAlloc, left: Self, right: Self) -> Self;
}
Expand Down
4 changes: 2 additions & 2 deletions core/src/parser/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,10 +21,10 @@ pub enum LexicalError {
Generic(Range<usize>),
}

/// Error indicating that a construct is not allowed when trying to interpret an `UniRecord` as a
/// Error indicating that a construct is not allowed when trying to interpret a `UniRecord` as a
/// record type in a strict way.
///
/// See [`UniRecord::into_type_strict`](crate::parser::uniterm::UniRecord::into_type_strict).
/// See `parser::uniterm::UniRecord::into_type_strict`.
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum InvalidRecordTypeError {
/// The record type had an invalid field, for example because it had a contract,
Expand Down
2 changes: 1 addition & 1 deletion core/src/parser/grammar.lalrpop
Original file line number Diff line number Diff line change
Expand Up @@ -719,7 +719,7 @@ ConstantPatternData: ConstantPatternData<'ast> = {
// We could accept multiline strings here, but it's unlikely that this will
// result in very readable match expressions. For now we restrict ourselves
// to standard string; we can always extend to multiline later if needed
StandardStaticString => ConstantPatternData::String(alloc.string_move(&<>)),
StandardStaticString => ConstantPatternData::String(alloc.alloc_str(&<>)),
"null" => ConstantPatternData::Null,
};

Expand Down
2 changes: 1 addition & 1 deletion core/src/parser/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -241,7 +241,7 @@ generate_compat_impl!(
generate_compat_impl!(grammar::TermParser, crate::term::RichTerm);
generate_compat_impl!(grammar::FixedTypeParser, crate::typ::Type);

impl<'ast> ErrorTolerantParserCompat<(Vec<LocIdent>, crate::term::RichTerm, RawSpan)>
impl ErrorTolerantParserCompat<(Vec<LocIdent>, crate::term::RichTerm, RawSpan)>
for grammar::CliFieldAssignmentParser
{
fn parse_tolerant_compat(
Expand Down
10 changes: 5 additions & 5 deletions core/src/parser/uniterm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -388,14 +388,14 @@ impl<'ast> UniRecord<'ast> {
annotation:
Annotation {
typ: Some(typ),
contracts,
contracts: [],
},
opt: false,
not_exported: false,
priority: MergePriority::Neutral,
},
pos: _,
} if contracts.is_empty() => Ok(RecordRows(RecordRowsF::Extend {
} => Ok(RecordRows(RecordRowsF::Extend {
row: RecordRow {
id,
typ: alloc.type_data(typ.typ, typ.pos),
Expand Down Expand Up @@ -439,7 +439,7 @@ impl<'ast> UniRecord<'ast> {
if field_def.path.len() > 1 {
let span = field_def
.path
.into_iter()
.iter()
.map(|path_elem| path_elem.pos().unwrap())
.reduce(|acc, span| acc.fuse(span).unwrap_or(acc))
// We already checked that the path is non-empty.
Expand Down Expand Up @@ -766,7 +766,7 @@ where
) -> Result<Option<Self>, ParseError>;
}

impl<'ast, 'a> FixTypeVars<'ast> for Type<'ast> {
impl<'ast> FixTypeVars<'ast> for Type<'ast> {
fn fix_type_vars_env(
&self,
alloc: &'ast AstAlloc,
Expand Down Expand Up @@ -1068,7 +1068,7 @@ pub fn fix_field_types<'ast>(
.map(|ctr| {
Ok(ctr
.fix_type_vars_ref(alloc, span)?
.map(|typ| Cow::Owned(typ))
.map(Cow::Owned)
.unwrap_or(Cow::Borrowed(ctr)))
})
.collect();
Expand Down
21 changes: 10 additions & 11 deletions core/src/parser/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ use crate::{
files::FileId,
fun,
identifier::LocIdent,
label::{Label, MergeLabel},
label::Label,
position::{RawSpan, TermPos},
primop_app,
typ::Type,
Expand Down Expand Up @@ -180,7 +180,6 @@ impl EtaExpand for InfixOp {
}
InfixOp(op) => {
let vars: Vec<_> = (0..op.arity())
.into_iter()
.map(|i| LocIdent::from(format!("x{i}")))
.collect();
let fun_args: Vec<_> = vars.iter().map(|arg| pattern::Pattern::any(*arg)).collect();
Expand Down Expand Up @@ -393,11 +392,11 @@ pub fn mk_let<'ast>(
Ok(alloc.let_block(bindings, body, rec))
}

pub fn mk_import_based_on_filename<'ast>(
alloc: &'ast AstAlloc,
pub fn mk_import_based_on_filename(
alloc: &AstAlloc,
path: String,
_span: RawSpan,
) -> Result<Node<'ast>, ParseError> {
) -> Result<Node<'_>, ParseError> {
let path = OsString::from(path);
let format: Option<InputFormat> =
InputFormat::from_path(std::path::Path::new(path.as_os_str()));
Expand All @@ -408,12 +407,12 @@ pub fn mk_import_based_on_filename<'ast>(
Ok(alloc.import_path(path, format))
}

pub fn mk_import_explicit<'ast>(
alloc: &'ast AstAlloc,
pub fn mk_import_explicit(
alloc: &AstAlloc,
path: String,
format: LocIdent,
span: RawSpan,
) -> Result<Node<'ast>, ParseError> {
) -> Result<Node<'_>, ParseError> {
let path = OsString::from(path);
let Some(format) = InputFormat::from_tag(format.label()) else {
return Err(ParseError::InvalidImportFormat { span });
Expand All @@ -428,7 +427,7 @@ pub fn mk_import_explicit<'ast>(
/// indentation level of a line is the number of consecutive whitespace characters, which are
either a space or a tab, counted from the beginning of the line. If a line is empty or consists
/// only of whitespace characters, it is ignored.
pub fn min_indent<'ast>(chunks: &[StringChunk<Ast<'ast>>]) -> usize {
pub fn min_indent(chunks: &[StringChunk<Ast<'_>>]) -> usize {
let mut min: usize = usize::MAX;
let mut current = 0;
let mut start_line = true;
Expand Down Expand Up @@ -521,12 +520,12 @@ pub fn min_indent<'ast>(chunks: &[StringChunk<Ast<'ast>>]) -> usize {
///not sth
/// end"
/// ```
pub fn strip_indent<'ast>(chunks: &mut Vec<StringChunk<Ast<'ast>>>) {
pub fn strip_indent(chunks: &mut [StringChunk<Ast<'_>>]) {
if chunks.is_empty() {
return;
}

let min = min_indent(&chunks);
let min = min_indent(chunks);
let mut current = 0;
let mut start_line = true;
let chunks_len = chunks.len();
Expand Down

0 comments on commit e86bc0a

Please sign in to comment.