diff --git a/.github/actions-rs/grcov.yml b/.github/actions-rs/grcov.yml
new file mode 100644
index 00000000..a6ef7de8
--- /dev/null
+++ b/.github/actions-rs/grcov.yml
@@ -0,0 +1,9 @@
+branch: true
+ignore-not-existing: true
+llvm: true
+filter: covered
+output-type: lcov
+output-path: ./lcov.info
+ignore:
+  - "third_party/*"
+  - "/*"
diff --git a/.github/workflows/coveralls.yml b/.github/workflows/coveralls.yml
new file mode 100644
index 00000000..bab8255c
--- /dev/null
+++ b/.github/workflows/coveralls.yml
@@ -0,0 +1,52 @@
+---
+name: OpenSK code coverage report
+on:
+  push:
+    paths:
+      - 'src/**/*.rs'
+      - 'libraries/**/*.rs'
+  pull_request:
+    types: [opened, synchronize, reopened]
+
+jobs:
+  coveralls:
+    name: OpenSK code coverage
+    runs-on: ubuntu-18.04
+
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          submodules: "true"
+      - uses: actions-rs/toolchain@v1
+        with:
+          target: thumbv7em-none-eabi
+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: nightly
+          override: true
+      - name: Install grcov
+        run: if [[ ! -e ~/.cargo/bin/grcov ]]; then cargo +nightly install grcov; fi
+      - uses: actions/setup-python@v1
+        with:
+          python-version: 3.7
+      - name: Install Python dependencies
+        run: python -m pip install --upgrade pip setuptools wheel
+      - name: Set up OpenSK
+        run: ./setup.sh
+
+      - uses: actions-rs/cargo@v1
+        with:
+          command: test
+          args: --features "with_ctap1,with_nfc,std" --no-fail-fast
+        env:
+          CARGO_INCREMENTAL: '0'
+          RUSTFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
+          RUSTDOCFLAGS: '-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Coverflow-checks=off -Cpanic=abort -Zpanic_abort_tests'
+      - uses: actions-rs/grcov@v0.1.5
+        id: coverage
+      - uses: coverallsapp/github-action@1.1.3
+        name: upload report to coveralls
+        with:
+          github-token: ${{ secrets.GITHUB_TOKEN }}
+          path-to-lcov: ${{ steps.coverage.outputs.report }}
+
diff --git a/Cargo.toml b/Cargo.toml
index 15984a0f..987fa0c9 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -31,7 +31,6 @@ with_ctap2_1 = []
 with_nfc = ["libtock_drivers/with_nfc"]
 
 [dev-dependencies]
-elf2tab = "0.6.0"
 enum-iterator = "0.6.0"
 
 [build-dependencies]
diff --git a/README.md b/README.md
index 81772201..3a51ee25 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,10 @@
 # OpenSK logo
 
-![markdownlint](https://github.com/google/OpenSK/workflows/markdownlint/badge.svg?branch=master)
-![pylint](https://github.com/google/OpenSK/workflows/pylint/badge.svg?branch=master)
-![Cargo check](https://github.com/google/OpenSK/workflows/Cargo%20check/badge.svg?branch=master)
-![Cargo format](https://github.com/google/OpenSK/workflows/Cargo%20format/badge.svg?branch=master)
+![markdownlint](https://github.com/google/OpenSK/workflows/markdownlint/badge.svg?branch=stable)
+![pylint](https://github.com/google/OpenSK/workflows/pylint/badge.svg?branch=stable)
+![Cargo check](https://github.com/google/OpenSK/workflows/Cargo%20check/badge.svg?branch=stable)
+![Cargo format](https://github.com/google/OpenSK/workflows/Cargo%20format/badge.svg?branch=stable)
+[![Coverage Status](https://coveralls.io/repos/github/google/OpenSK/badge.svg?branch=stable)](https://coveralls.io/github/google/OpenSK?branch=stable)
 
 ## OpenSK
 
@@ -201,3 +202,7 @@ cargo run --manifest-path tools/heapviz/Cargo.toml -- --logfile console.log --fp
 ## Contributing
 
 See [Contributing.md](docs/contributing.md).
+
+## Reporting a Vulnerability
+
+See [SECURITY.md](SECURITY.md).
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 00000000..ce1f393f
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,4 @@
+To report a security issue, please use http://g.co/vulnz. We use
+http://g.co/vulnz for intake, and coordinate and disclose here on GitHub
+(including via GitHub Security Advisories). The Google Security Team will
+respond within 5 working days of your report on g.co/vulnz.
diff --git a/deploy.py b/deploy.py
index 2bae96cc..8fbb05b9 100755
--- a/deploy.py
+++ b/deploy.py
@@ -308,7 +308,7 @@ def checked_command_output(self, cmd, env=None, cwd=None):
 
   def update_rustc_if_needed(self):
     target_toolchain_fullstring = "stable"
-    with open("rust-toolchain", "r") as f:
+    with open("rust-toolchain", "r", encoding="utf-8") as f:
       target_toolchain_fullstring = f.readline().strip()
     target_toolchain = target_toolchain_fullstring.split("-", maxsplit=1)
     if len(target_toolchain) == 1:
@@ -951,7 +951,16 @@ def main(args):
       dest="application",
       action="store_const",
       const="store_latency",
-      help=("Compiles and installs the store_latency example."))
+      help=("Compiles and installs the store_latency example which prints "
+            "latency statistics of the persistent store library."))
+  apps_group.add_argument(
+      "--erase_storage",
+      dest="application",
+      action="store_const",
+      const="erase_storage",
+      help=("Compiles and installs the erase_storage example which erases "
+            "the storage. While the operation runs, the dongle's red light "
+            "is on; once it completes, the green light turns on."))
   apps_group.add_argument(
       "--panic_test",
       dest="application",
diff --git a/examples/erase_storage.rs b/examples/erase_storage.rs
new file mode 100644
index 00000000..6076348f
--- /dev/null
+++ b/examples/erase_storage.rs
@@ -0,0 +1,53 @@
+// Copyright 2020 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#![no_std]
+
+extern crate lang_items;
+
+use core::fmt::Write;
+use ctap2::embedded_flash::new_storage;
+use libtock_drivers::console::Console;
+use libtock_drivers::led;
+use libtock_drivers::result::FlexUnwrap;
+use persistent_store::{Storage, StorageIndex};
+
+fn is_page_erased(storage: &dyn Storage, page: usize) -> bool {
+    let index = StorageIndex { page, byte: 0 };
+    let length = storage.page_size();
+    storage
+        .read_slice(index, length)
+        .unwrap()
+        .iter()
+        .all(|&x| x == 0xff)
+}
+
+fn main() {
+    led::get(1).flex_unwrap().on().flex_unwrap(); // red on dongle
+    const NUM_PAGES: usize = 20; // should be at least ctap::storage::NUM_PAGES
+    let mut storage = new_storage(NUM_PAGES);
+    writeln!(Console::new(), "Erase {} pages of storage:", NUM_PAGES).unwrap();
+    for page in 0..NUM_PAGES {
+        write!(Console::new(), "- Page {} ", page).unwrap();
+        if is_page_erased(&storage, page) {
+            writeln!(Console::new(), "skipped (was already erased).").unwrap();
+        } else {
+            storage.erase_page(page).unwrap();
+            writeln!(Console::new(), "erased.").unwrap();
+        }
+    }
+    writeln!(Console::new(), "Done.").unwrap();
+    led::get(1).flex_unwrap().off().flex_unwrap();
+    led::get(0).flex_unwrap().on().flex_unwrap(); // green on dongle
+}
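The `is_page_erased` check above relies on a property of the NOR flash backing the store: an erased page reads back as all `0xff` bytes, so a page only needs an erase cycle if any byte differs. A minimal host-side sketch of the same check over a plain byte slice (the function name is illustrative and not part of this patch):

```rust
/// Returns true when every byte reads as 0xff, i.e. the erased state of NOR flash.
fn slice_is_erased(bytes: &[u8]) -> bool {
    bytes.iter().all(|&b| b == 0xff)
}

fn main() {
    assert!(slice_is_erased(&[0xff; 16])); // a freshly erased page
    assert!(!slice_is_erased(&[0xff, 0x00, 0xff])); // any programmed byte means not erased
    println!("erased-page check behaves as expected");
}
```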
diff --git a/fuzz/make_corpus.py b/fuzz/make_corpus.py
index 11c8575c..a6bd9ff5 100644
--- a/fuzz/make_corpus.py
+++ b/fuzz/make_corpus.py
@@ -29,7 +29,7 @@ def make_corpus(corpus_dir, corpus_json):
 
   if os.path.isfile(corpus_json) and \
      os.path.splitext(corpus_json)[-1] == ".json":
-    with open(corpus_json) as corpus_file:
+    with open(corpus_json, encoding="utf-8") as corpus_file:
       corpus = json.load(corpus_file)
   else:
     raise TypeError
diff --git a/fuzzing_setup.sh b/fuzzing_setup.sh
index 66edabbe..99e0532b 100755
--- a/fuzzing_setup.sh
+++ b/fuzzing_setup.sh
@@ -20,4 +20,4 @@ done_text="$(tput bold)DONE.$(tput sgr0)"
 set -e
 
 # Install cargo-fuzz library.
-cargo install cargo-fuzz
+cargo +stable install cargo-fuzz
diff --git a/libraries/persistent_store/src/format.rs b/libraries/persistent_store/src/format.rs
index 1f87ef3e..ae8d93f7 100644
--- a/libraries/persistent_store/src/format.rs
+++ b/libraries/persistent_store/src/format.rs
@@ -335,12 +335,12 @@ impl Format {
     }
 
     /// Builds the storage representation of an init info.
-    pub fn build_init(&self, init: InitInfo) -> WordSlice {
+    pub fn build_init(&self, init: InitInfo) -> StoreResult<WordSlice> {
         let mut word = ERASED_WORD;
-        INIT_CYCLE.set(&mut word, init.cycle);
-        INIT_PREFIX.set(&mut word, init.prefix);
-        WORD_CHECKSUM.set(&mut word, 0);
-        word.as_slice()
+        INIT_CYCLE.set(&mut word, init.cycle)?;
+        INIT_PREFIX.set(&mut word, init.prefix)?;
+        WORD_CHECKSUM.set(&mut word, 0)?;
+        Ok(word.as_slice())
     }
 
     /// Returns the storage index of the compact info of a page.
@@ -368,36 +368,36 @@
     }
 
     /// Builds the storage representation of a compact info.
-    pub fn build_compact(&self, compact: CompactInfo) -> WordSlice {
+    pub fn build_compact(&self, compact: CompactInfo) -> StoreResult<WordSlice> {
         let mut word = ERASED_WORD;
-        COMPACT_TAIL.set(&mut word, compact.tail);
-        WORD_CHECKSUM.set(&mut word, 0);
-        word.as_slice()
+        COMPACT_TAIL.set(&mut word, compact.tail)?;
+        WORD_CHECKSUM.set(&mut word, 0)?;
+        Ok(word.as_slice())
     }
 
     /// Builds the storage representation of an internal entry.
-    pub fn build_internal(&self, internal: InternalEntry) -> WordSlice {
+    pub fn build_internal(&self, internal: InternalEntry) -> StoreResult<WordSlice> {
         let mut word = ERASED_WORD;
         match internal {
             InternalEntry::Erase { page } => {
-                ID_ERASE.set(&mut word);
-                ERASE_PAGE.set(&mut word, page);
+                ID_ERASE.set(&mut word)?;
+                ERASE_PAGE.set(&mut word, page)?;
             }
             InternalEntry::Clear { min_key } => {
-                ID_CLEAR.set(&mut word);
-                CLEAR_MIN_KEY.set(&mut word, min_key);
+                ID_CLEAR.set(&mut word)?;
+                CLEAR_MIN_KEY.set(&mut word, min_key)?;
             }
             InternalEntry::Marker { count } => {
-                ID_MARKER.set(&mut word);
-                MARKER_COUNT.set(&mut word, count);
+                ID_MARKER.set(&mut word)?;
+                MARKER_COUNT.set(&mut word, count)?;
             }
             InternalEntry::Remove { key } => {
-                ID_REMOVE.set(&mut word);
-                REMOVE_KEY.set(&mut word, key);
+                ID_REMOVE.set(&mut word)?;
+                REMOVE_KEY.set(&mut word, key)?;
             }
         }
-        WORD_CHECKSUM.set(&mut word, 0);
-        word.as_slice()
+        WORD_CHECKSUM.set(&mut word, 0)?;
+        Ok(word.as_slice())
     }
 
     /// Parses the first word of an entry from its storage representation.
@@ -459,31 +459,31 @@
     }
 
     /// Builds the storage representation of a user entry.
-    pub fn build_user(&self, key: Nat, value: &[u8]) -> Vec<u8> {
+    pub fn build_user(&self, key: Nat, value: &[u8]) -> StoreResult<Vec<u8>> {
         let length = usize_to_nat(value.len());
         let word_size = self.word_size();
         let footer = self.bytes_to_words(length);
         let mut result = vec![0xff; ((1 + footer) * word_size) as usize];
         result[word_size as usize..][..length as usize].copy_from_slice(value);
         let mut word = ERASED_WORD;
-        ID_HEADER.set(&mut word);
+        ID_HEADER.set(&mut word)?;
         if footer > 0 && is_erased(&result[(footer * word_size) as usize..]) {
             HEADER_FLIPPED.set(&mut word);
             *result.last_mut().unwrap() = 0x7f;
         }
-        HEADER_LENGTH.set(&mut word, length);
-        HEADER_KEY.set(&mut word, key);
+        HEADER_LENGTH.set(&mut word, length)?;
+        HEADER_KEY.set(&mut word, key)?;
         HEADER_CHECKSUM.set(
             &mut word,
             count_zeros(&result[(footer * word_size) as usize..]),
-        );
+        )?;
         result[..word_size as usize].copy_from_slice(&word.as_slice());
-        result
+        Ok(result)
     }
 
     /// Sets the padding bit in the first word of a user entry.
-    pub fn set_padding(&self, word: &mut Word) {
-        ID_PADDING.set(word);
+    pub fn set_padding(&self, word: &mut Word) -> StoreResult<()> {
+        ID_PADDING.set(word)
     }
 
     /// Sets the deleted bit in the first word of a user entry.
diff --git a/libraries/persistent_store/src/format/bitfield.rs b/libraries/persistent_store/src/format/bitfield.rs
index 2cffc4bf..32c0ae5a 100644
--- a/libraries/persistent_store/src/format/bitfield.rs
+++ b/libraries/persistent_store/src/format/bitfield.rs
@@ -42,15 +42,20 @@ impl Field {
 
     /// Sets the value of a bit field.
     ///
-    /// # Preconditions
+    /// # Errors
     ///
     /// - The value must fit in the bit field: `num_bits(value) < self.len`.
    /// - The value must only change bits from 1 to 0: `self.get(*word) & value == value`.
-    pub fn set(&self, word: &mut Word, value: Nat) {
-        debug_assert_eq!(value & self.mask(), value);
+    pub fn set(&self, word: &mut Word, value: Nat) -> StoreResult<()> {
+        if value & self.mask() != value {
+            return Err(StoreError::InvalidStorage);
+        }
         let mask = !(self.mask() << self.pos);
         word.0 &= mask | (value << self.pos);
-        debug_assert_eq!(self.get(*word), value);
+        if self.get(*word) != value {
+            return Err(StoreError::InvalidStorage);
+        }
+        Ok(())
     }
 
     /// Returns a bit mask the length of the bit field.
@@ -82,8 +87,8 @@
     }
 
     /// Sets the bit field to its value.
-    pub fn set(&self, word: &mut Word) {
-        self.field.set(word, self.value);
+    pub fn set(&self, word: &mut Word) -> StoreResult<()> {
+        self.field.set(word, self.value)
     }
 }
 
@@ -135,15 +140,15 @@ impl Checksum {
 
     /// Sets the checksum to the external increment value.
     ///
-    /// # Preconditions
+    /// # Errors
     ///
     /// - The bits of the checksum bit field should be set to one: `self.field.get(*word) ==
     ///   self.field.mask()`.
     /// - The checksum value should fit in the checksum bit field: `num_bits(word.count_zeros() +
     ///   value) < self.field.len`.
-    pub fn set(&self, word: &mut Word, value: Nat) {
+    pub fn set(&self, word: &mut Word, value: Nat) -> StoreResult<()> {
         debug_assert_eq!(self.field.get(*word), self.field.mask());
-        self.field.set(word, word.0.count_zeros() + value);
+        self.field.set(word, word.0.count_zeros() + value)
     }
 }
 
@@ -290,7 +295,7 @@ mod tests {
         assert_eq!(field.get(Word(0x000000f8)), 0x1f);
         assert_eq!(field.get(Word(0x0000ff37)), 6);
         let mut word = Word(0xffffffff);
-        field.set(&mut word, 3);
+        field.set(&mut word, 3).unwrap();
         assert_eq!(word, Word(0xffffff1f));
     }
 
@@ -305,7 +310,7 @@
         assert!(field.check(Word(0x00000048)));
         assert!(field.check(Word(0x0000ff4f)));
         let mut word = Word(0xffffffff);
-        field.set(&mut word);
+        field.set(&mut word).unwrap();
         assert_eq!(word, Word(0xffffff4f));
     }
 
@@ -333,7 +338,7 @@
         assert_eq!(field.get(Word(0x00ffff67)), Ok(4));
         assert_eq!(field.get(Word(0x7fffff07)), Err(StoreError::InvalidStorage));
         let mut word = Word(0x0fffffff);
-        field.set(&mut word, 4);
+        field.set(&mut word, 4).unwrap();
         assert_eq!(word, Word(0x0fffff47));
     }
 
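The format.rs and bitfield.rs changes above share one idea: bit-field setters now return a `StoreResult` instead of relying on `debug_assert!`, so an out-of-range value, or an attempt to flip a bit back from 0 to 1, surfaces as `StoreError::InvalidStorage` in release builds as well. A self-contained sketch of that pattern with simplified stand-ins for the crate's `Field`, `StoreError`, and `StoreResult` types (illustrative only, not the library's actual definitions):

```rust
#[derive(Debug, PartialEq)]
enum StoreError {
    InvalidStorage,
}
type StoreResult<T> = Result<T, StoreError>;

/// A bit field of `len` bits starting at bit `pos` of a 32-bit word.
struct Field {
    pos: u32,
    len: u32,
}

impl Field {
    fn mask(&self) -> u32 {
        (1 << self.len) - 1
    }

    fn get(&self, word: u32) -> u32 {
        (word >> self.pos) & self.mask()
    }

    /// Writes `value` into the field. Flash-like words can only flip bits from 1
    /// to 0, so the write fails if the value is too wide for the field or if a
    /// bit that is already 0 would have to become 1 again.
    fn set(&self, word: &mut u32, value: u32) -> StoreResult<()> {
        if value & self.mask() != value {
            return Err(StoreError::InvalidStorage); // value does not fit in `len` bits
        }
        *word &= !(self.mask() << self.pos) | (value << self.pos);
        if self.get(*word) != value {
            return Err(StoreError::InvalidStorage); // a cleared bit cannot be raised again
        }
        Ok(())
    }
}

fn main() -> StoreResult<()> {
    let field = Field { pos: 3, len: 5 };
    let mut word = u32::MAX; // an erased word: all bits set
    field.set(&mut word, 0x15)?; // fits and only clears bits: ok
    assert_eq!(field.get(word), 0x15);
    // A value wider than the field is now a recoverable error, not a debug-only panic.
    assert_eq!(field.set(&mut word, 0x20), Err(StoreError::InvalidStorage));
    // So is raising a bit that was already programmed to 0.
    assert_eq!(field.set(&mut word, 0x1f), Err(StoreError::InvalidStorage));
    Ok(())
}
```

The `?` chains added in `build_init`, `build_compact`, `build_internal`, and `build_user` simply forward such an error to the store layer instead of silently emitting a corrupted word.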
diff --git a/libraries/persistent_store/src/store.rs b/libraries/persistent_store/src/store.rs
index 25594850..0d5c1e99 100644
--- a/libraries/persistent_store/src/store.rs
+++ b/libraries/persistent_store/src/store.rs
@@ -270,7 +270,9 @@ impl<S: Storage> Store<S> {
         self.reserve(self.format.transaction_capacity(updates))?;
         // Write the marker entry.
         let marker = self.tail()?;
-        let entry = self.format.build_internal(InternalEntry::Marker { count });
+        let entry = self
+            .format
+            .build_internal(InternalEntry::Marker { count })?;
         self.write_slice(marker, &entry)?;
         self.init_page(marker, marker)?;
         // Write the updates.
@@ -278,7 +280,7 @@ impl<S: Storage> Store<S> {
         for update in updates {
             let length = match *update {
                 StoreUpdate::Insert { key, ref value } => {
-                    let entry = self.format.build_user(usize_to_nat(key), value);
+                    let entry = self.format.build_user(usize_to_nat(key), value)?;
                     let word_size = self.format.word_size();
                     let footer = usize_to_nat(entry.len()) / word_size - 1;
                     self.write_slice(tail, &entry[..(footer * word_size) as usize])?;
@@ -287,7 +289,7 @@ impl<S: Storage> Store<S> {
                 }
                 StoreUpdate::Remove { key } => {
                     let key = usize_to_nat(key);
-                    let remove = self.format.build_internal(InternalEntry::Remove { key });
+                    let remove = self.format.build_internal(InternalEntry::Remove { key })?;
                     self.write_slice(tail, &remove)?;
                     0
                 }
@@ -307,7 +309,9 @@ impl<S: Storage> Store<S> {
         if min_key > self.format.max_key() {
             return Err(StoreError::InvalidArgument);
         }
-        let clear = self.format.build_internal(InternalEntry::Clear { min_key });
+        let clear = self
+            .format
+            .build_internal(InternalEntry::Clear { min_key })?;
         // We always have one word available. We can't use `reserve` because this is internal
         // capacity, not user capacity.
         while self.immediate_capacity()? < 1 {
@@ -373,7 +377,7 @@ impl<S: Storage> Store<S> {
         if key > self.format.max_key() || value_len > self.format.max_value_len() {
             return Err(StoreError::InvalidArgument);
         }
-        let entry = self.format.build_user(key, value);
+        let entry = self.format.build_user(key, value)?;
         let entry_len = usize_to_nat(entry.len());
         self.reserve(entry_len / self.format.word_size())?;
         let tail = self.tail()?;
@@ -437,7 +441,7 @@ impl<S: Storage> Store<S> {
         let init_info = self.format.build_init(InitInfo {
             cycle: 0,
             prefix: 0,
-        });
+        })?;
         self.storage_write_slice(index, &init_info)
     }
 
@@ -646,7 +650,9 @@ impl<S: Storage> Store<S> {
         }
         let tail = max(self.tail()?, head.next_page(&self.format));
         let index = self.format.index_compact(head.page(&self.format));
-        let compact_info = self.format.build_compact(CompactInfo { tail: tail - head });
+        let compact_info = self
+            .format
+            .build_compact(CompactInfo { tail: tail - head })?;
         self.storage_write_slice(index, &compact_info)?;
         self.compact_copy()
     }
@@ -680,7 +686,7 @@ impl<S: Storage> Store<S> {
             self.init_page(tail, tail + (length - 1))?;
             tail += length;
         }
-        let erase = self.format.build_internal(InternalEntry::Erase { page });
+        let erase = self.format.build_internal(InternalEntry::Erase { page })?;
         self.write_slice(tail, &erase)?;
         self.init_page(tail, tail)?;
         self.compact_erase(tail)
@@ -792,7 +798,7 @@ impl<S: Storage> Store<S> {
         let init_info = self.format.build_init(InitInfo {
             cycle: new_first.cycle(&self.format),
             prefix: new_first.word(&self.format),
-        });
+        })?;
         self.storage_write_slice(index, &init_info)?;
         Ok(())
     }
@@ -800,7 +806,7 @@ impl<S: Storage> Store<S> {
     /// Sets the padding bit of a user header.
     fn set_padding(&mut self, pos: Position) -> StoreResult<()> {
         let mut word = Word::from_slice(self.read_word(pos));
-        self.format.set_padding(&mut word);
+        self.format.set_padding(&mut word)?;
         self.write_slice(pos, &word.as_slice())?;
         Ok(())
     }
@@ -1110,10 +1116,12 @@ impl<S: Storage> Store<S> {
         let format = Format::new(storage).unwrap();
         // Write the init info of the first page.
         let mut index = format.index_init(0);
-        let init_info = format.build_init(InitInfo {
-            cycle: usize_to_nat(cycle),
-            prefix: 0,
-        });
+        let init_info = format
+            .build_init(InitInfo {
+                cycle: usize_to_nat(cycle),
+                prefix: 0,
+            })
+            .unwrap();
         storage.write_slice(index, &init_info).unwrap();
         // Pad the first word of the page. This makes the store looks used, otherwise we may confuse
         // it with a partially initialized store.
diff --git a/setup.sh b/setup.sh
index 6d58053b..90ee8c86 100755
--- a/setup.sh
+++ b/setup.sh
@@ -43,7 +43,8 @@ rustup target add thumbv7em-none-eabi
 
 # Install dependency to create applications.
 mkdir -p elf2tab
-cargo install elf2tab --version 0.6.0 --root elf2tab/
+rustup install stable
+cargo +stable install elf2tab --version 0.6.0 --root elf2tab/
 
 # Install python dependencies to factory configure OpenSK (crypto, JTAG lockdown)
 pip3 install --user --upgrade colorama tqdm cryptography "fido2>=0.9.1"