Skip to content

Commit

Permalink
Implement map remove feature
Browse files Browse the repository at this point in the history
  • Loading branch information
diondokter committed Feb 12, 2024
1 parent 3b9a6de commit e01b629
Show file tree
Hide file tree
Showing 2 changed files with 79 additions and 1 deletion.
6 changes: 6 additions & 0 deletions src/item.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,9 @@ impl ItemHeader {
const LENGTH_FIELD: Range<usize> = 4..6;
const LENGTH_CRC_FIELD: Range<usize> = 6..8;

/// Read the header from the flash at the given address.
///
/// If the item doesn't exist or doesn't fit between the address and the end address, `None` is returned.
pub async fn read_new<S: NorFlash>(
flash: &mut S,
address: u32,
Expand Down Expand Up @@ -160,6 +163,7 @@ impl ItemHeader {
})
}

/// Erase this item by setting the CRC to `None` and overwriting the header with it
pub async fn erase_data<S: MultiwriteNorFlash>(
mut self,
flash: &mut S,
Expand All @@ -173,10 +177,12 @@ impl ItemHeader {
Ok(self)
}

/// Get the address of the start of the data for this item
pub fn data_address<S: NorFlash>(address: u32) -> u32 {
    // The data begins right after the header, padded out to the flash word alignment.
    let aligned_header_len = round_up_to_alignment::<S>(Self::LENGTH as u32);
    address + aligned_header_len
}

/// Get the location of the next item in flash
pub fn next_item_address<S: NorFlash>(&self, address: u32) -> u32 {
let data_address = ItemHeader::data_address::<S>(address);
data_address + round_up_to_alignment::<S>(self.length as u32)
Expand Down
74 changes: 73 additions & 1 deletion src/map.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,9 +115,14 @@
//! # });
//! ```

use embedded_storage_async::nor_flash::MultiwriteNorFlash;

use crate::item::{find_next_free_item_spot, Item, ItemHeader, ItemIter};

use self::cache::{KeyCacheImpl, PrivateKeyCacheImpl};
use self::{
cache::{KeyCacheImpl, PrivateKeyCacheImpl},
item::ItemHeaderIter,
};

use super::*;

Expand Down Expand Up @@ -476,6 +481,73 @@ pub async fn store_item<I: StorageItem, S: NorFlash>(
}
}

/// Remove all stored items with the given key from the map.
///
/// Every item whose key equals `search_key` — both the current value and any superseded
/// older versions still present on closed pages — has its data erased, so a subsequent
/// fetch of this key finds nothing until it is stored again.
///
/// Requires [`MultiwriteNorFlash`] because erasing an item overwrites part of an
/// already-programmed header in place (see `ItemHeader::erase_data`).
pub async fn remove_item<I: StorageItem, S: MultiwriteNorFlash>(
    flash: &mut S,
    flash_range: Range<u32>,
    mut cache: impl KeyCacheImpl<I::Key>,
    data_buffer: &mut [u8],
    search_key: I::Key,
) -> Result<(), MapError<I::Error, S::Error>> {
    // Search for the last used page. We're gonna erase from the one after this first.
    // If we get an early shutoff or cancellation, this will make it so that we don't return
    // an old version of the key on the next fetch.
    let last_used_page = find_first_page(
        flash,
        flash_range.clone(),
        &mut cache,
        0,
        PageState::PartialOpen,
    )
    .await?
    // No partially-open page found; fall back to the first page (index 0).
    .unwrap_or_default();

    // Go through all the pages
    for page_index in get_pages::<S>(
        flash_range.clone(),
        next_page::<S>(flash_range.clone(), last_used_page),
    ) {
        if get_page_state(flash, flash_range.clone(), &mut cache, page_index)
            .await?
            .is_open()
        {
            // This page is open, we don't have to check it
            continue;
        }

        // Item data sits between the first and last word of the page.
        // NOTE(review): these two words appear to be reserved for page-state
        // markers (cf. get_page_state) — confirm against the page layout docs.
        let page_data_start_address =
            calculate_page_address::<S>(flash_range.clone(), page_index) + S::WORD_SIZE as u32;
        let page_data_end_address =
            calculate_page_end_address::<S>(flash_range.clone(), page_index) - S::WORD_SIZE as u32;

        // Go through all items on the page
        let mut item_headers = ItemHeaderIter::new(page_data_start_address, page_data_end_address);

        while let (Some(item_header), item_address) = item_headers.next(flash).await? {
            let item = item_header
                .read_item(flash, data_buffer, item_address, page_data_end_address)
                .await?;

            match item {
                // Corrupted or already-erased items cannot match the key; skip them.
                item::MaybeItem::Corrupted(_, _) => continue,
                item::MaybeItem::Erased(_, _) => continue,
                item::MaybeItem::Present(item) => {
                    let item_key = I::deserialize_key_only(item.data()).map_err(MapError::Item)?;

                    // If this item has the same key as the key we're trying to erase, then erase the item.
                    // But keep going! We need to erase everything.
                    if item_key == search_key {
                        item.header
                            .erase_data(flash, flash_range.clone(), &mut cache, item_address)
                            .await?;
                    }
                }
            }
        }
    }

    Ok(())
}

/// A way of serializing and deserializing items in the storage.
///
/// Serialized items must not be 0 bytes and may not be longer than [u16::MAX].
Expand Down

0 comments on commit e01b629

Please sign in to comment.