feat: add std feature and almost full no_std support. (#192)
# Rationale for this change

We want `no_std` support.

# What changes are included in this PR?

* Added `std` and `perf` features to `proof-of-sql`; with `std` disabled, the crate builds as `no_std` (see the sketch after this list).
* Made `proof-of-sql-parser` `no_std`.
* Updated dependency features to disable `std` usage where not needed.
* Added CI jobs to ensure these features always pass.
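
For reference, the `std` feature follows the standard conditional `no_std` pattern: the crate root opts out of the standard library unless `std` is enabled, and heap-allocated types come from `alloc`. A minimal sketch of that pattern (the helper function is illustrative only, not part of this PR):

```rust
// Crate root: build without the standard library unless the `std` feature is on.
#![cfg_attr(not(feature = "std"), no_std)]

// `alloc` still provides Vec, String, Box, etc. in no_std builds.
extern crate alloc;

use alloc::{string::String, vec::Vec};

/// Illustrative helper: compiles identically with or without `std`,
/// because it only touches `alloc` types.
pub fn column_names(names: &[&str]) -> Vec<String> {
    names.iter().map(|name| String::from(*name)).collect()
}
```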

The one caveat is that `lalrpop` needs a patch (pending lalrpop/lalrpop#953) in order to have true
`no_std` support. To test `no_std` compatibility, we build on a target that does not support `std`:
a new CI job targets `thumbv7em-none-eabi`, and it fails without the lalrpop patch.

Once `lalrpop` cuts a release, we can simply bump the version and remove
the patch in the CI.

# Are these changes tested?

Yes
tlovell-sxt committed Sep 29, 2024
2 parents d001f3c + df91594 commit 173ec57
Showing 8 changed files with 62 additions and 35 deletions.
21 changes: 17 additions & 4 deletions .github/workflows/lint-and-test.yml
@@ -49,6 +49,19 @@ jobs:
run: cargo check -p proof-of-sql --no-default-features --features="test"
- name: Run cargo check (proof-of-sql) (just "blitzar" feature)
run: cargo check -p proof-of-sql --no-default-features --features="blitzar"
- name: Run cargo check (proof-of-sql) (just "arrow" feature)
run: cargo check -p proof-of-sql --no-default-features --features="arrow"
- name: Run cargo check (proof-of-sql) (just "rayon" feature)
run: cargo check -p proof-of-sql --no-default-features --features="rayon"
- name: Run cargo check (proof-of-sql) (just "perf" feature)
run: cargo check -p proof-of-sql --no-default-features --features="perf"
- name: Run cargo check (proof-of-sql) (just "std" feature)
run: cargo check -p proof-of-sql --no-default-features --features="std"
- name: Run cargo check (proof-of-sql-parser) with no_std target. This requires a lalrpop patch.
run: |
rustup target add thumbv7em-none-eabi
printf '\n[patch.crates-io]\nlalrpop = { git = "https://github.com/lalrpop/lalrpop", rev = "173597c" }\nlalrpop-util = { git = "https://github.com/lalrpop/lalrpop", rev = "173597c" }\n' >> Cargo.toml
cargo check -p proof-of-sql-parser --target thumbv7em-none-eabi
test:
name: Test Suite
@@ -83,10 +96,10 @@ jobs:
run: cargo test -p proof-of-sql --no-run --no-default-features --features="arrow"
- name: Dry run cargo test (proof-of-sql) (blitzar feature only)
run: cargo test -p proof-of-sql --no-run --no-default-features --features="blitzar"
- name: Dry run cargo test (proof-of-sql) (no features)
run: cargo test -p proof-of-sql --no-run --no-default-features
- name: Run cargo test (proof primitives - Dory) (no features - i.e. not using blitzar)
run: cargo test proof_primitive::dory::dory_compute_commitments_test --no-default-features
- name: Dry run cargo test (proof-of-sql) (std feature only)
run: cargo test -p proof-of-sql --no-run --no-default-features --features="std"
- name: Run cargo test (proof primitives - Dory) (std feature only - i.e. not using blitzar)
run: cargo test proof_primitive::dory::dory_compute_commitments_test --no-default-features --features="std"
- name: Run hello_world example
run: cargo run --example hello_world --features="blitzar test"
- name: Run posql_db example
27 changes: 13 additions & 14 deletions Cargo.toml
@@ -10,26 +10,26 @@ version = "0.0.0" # DO NOT CHANGE THIS LINE! This will be automatically updated
license-file = "LICENSE"

[workspace.dependencies]
ahash = { version = "0.8.11", default-features = false, features = ["runtime-rng"] }
ahash = { version = "0.8.11", default-features = false }
# alloy-primitives = { version = "0.8.1" }
# alloy-sol-types = { version = "0.8.1" }
ark-bls12-381 = { version = "0.4.0" }
ark-curve25519 = { version = "0.4.0" }
ark-ec = { version = "0.4.0", features = [ "parallel" ] }
ark-ff = { version = "0.4.0", features = [ "parallel" ] }
ark-poly = { version = "0.4.0", features = [ "parallel" ] }
ark-ec = { version = "0.4.0" }
ark-ff = { version = "0.4.0" }
ark-poly = { version = "0.4.0" }
ark-serialize = { version = "0.4.0" }
ark-std = { version = "0.4.0", features = [ "parallel" ] }
ark-std = { version = "0.4.0", default-features = false }
arrayvec = { version = "0.7", default-features = false }
arrow = { version = "51.0" }
arrow-csv = { version = "51.0" }
bit-iter = { version = "1.1.1" }
bigdecimal = { version = "0.4.5", default-features = false, features = ["serde"] }
blake3 = { version = "1.3.3" }
blake3 = { version = "1.3.3", default-features = false }
blitzar = { version = "3.1.0" }
bumpalo = { version = "3.11.0" }
bytemuck = {version = "1.16.3", features = ["derive"]}
byte-slice-cast = { version = "1.2.1" }
byte-slice-cast = { version = "1.2.1", default-features = false }
clap = { version = "4.5.4" }
criterion = { version = "0.5.1" }
chrono = { version = "0.4.38", default-features = false }
@@ -38,12 +38,11 @@ derive_more = { version = "0.99" }
flexbuffers = { version = "2.0.0" }
# forge-script = { git = "https://github.com/foundry-rs/foundry", tag = "nightly-bf1a39980532f76cd76fd87ee32661180f606435" }
indexmap = { version = "2.1", default-features = false }
itertools = { version = "0.13.0" }
itertools = { version = "0.13.0", default-features = false, features = ["use_alloc"] }
lalrpop = { version = "0.21.0" }
lalrpop-util = { version = "0.20.0", default-features = false }
lazy_static = { version = "1.4.0" }
lalrpop-util = { version = "0.21.0", default-features = false }
merlin = { version = "2" }
num-traits = { version = "0.2" }
num-traits = { version = "0.2", default-features = false }
num-bigint = { version = "0.4.4", default-features = false }
opentelemetry = { version = "0.23.0" }
opentelemetry-jaeger = { version = "0.20.0" }
@@ -54,11 +53,11 @@ rand = { version = "0.8", default-features = false }
rand_core = { version = "0.6", default-features = false }
rayon = { version = "1.5" }
serde = { version = "1", default-features = false }
serde_json = { version = "1" }
snafu = { version = "0.8.4", default-features = false, features = ["std"] }
serde_json = { version = "1", default-features = false, features = ["alloc"] }
snafu = { version = "0.8.4", default-features = false }
tiny-keccak = { version = "2.0.2", features = [ "keccak" ] }
# tokio = { version = "1.39.3" }
tracing = { version = "0.1.36" }
tracing = { version = "0.1.36", default-features = false }
tracing-opentelemetry = { version = "0.22.0" }
tracing-subscriber = { version = "0.3.0" }
wasm-bindgen = { version = "0.2.92" }
1 change: 1 addition & 0 deletions crates/proof-of-sql-parser/src/lib.rs
@@ -1,4 +1,5 @@
#![doc = include_str!("../README.md")]
#![no_std]
extern crate alloc;

/// Module for handling an intermediate decimal type received from the lexer.
12 changes: 7 additions & 5 deletions crates/proof-of-sql/Cargo.toml
@@ -35,7 +35,6 @@ chrono = { workspace = true, features = ["serde"] }
derive_more = { workspace = true }
indexmap = { workspace = true, features = ["serde"] }
itertools = { workspace = true }
lazy_static = { workspace = true }
merlin = { workspace = true, optional = true }
num-traits = { workspace = true }
num-bigint = { workspace = true, default-features = false }
@@ -74,10 +73,13 @@ flexbuffers = { workspace = true }
development = ["arrow-csv"]

[features]
default = ["arrow", "blitzar", "rayon"]
arrow = ["dep:arrow"]
blitzar = ["dep:blitzar", "dep:merlin"]
test = ["dep:rand"]
default = ["arrow", "perf"]
arrow = ["dep:arrow", "std"]
blitzar = ["dep:blitzar", "dep:merlin", "std"]
test = ["dep:rand", "std"]
perf = ["blitzar", "rayon", "ark-ec/parallel", "ark-poly/parallel", "ark-ff/asm"]
rayon = ["dep:rayon", "std"]
std = ["snafu/std"]

[lints]
workspace = true
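With the feature graph above, `arrow`, `blitzar`, `rayon`, and `test` all imply `std`, while `perf` bundles `blitzar`, `rayon`, and the arkworks parallel/asm options. Code that genuinely needs the standard library can then be gated on the `std` feature; a hypothetical sketch of that gating (this function is not part of proof-of-sql's API):

```rust
/// Hypothetical example of gating std-only functionality on the new `std` feature.
#[cfg(feature = "std")]
pub fn load_query_from_file(path: &std::path::Path) -> std::io::Result<String> {
    // File I/O only exists in std builds; no_std users never see this function.
    std::fs::read_to_string(path)
}
```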
1 change: 1 addition & 0 deletions crates/proof-of-sql/src/base/commitment/mod.rs
@@ -1,5 +1,6 @@
//! Types for creation and utilization of cryptographic commitments to proof-of-sql data.
use crate::base::scalar::Scalar;
use alloc::vec::Vec;
#[cfg(feature = "blitzar")]
pub use blitzar::{
compute::{init_backend, init_backend_with_config, BackendConfig},
1 change: 1 addition & 0 deletions crates/proof-of-sql/src/lib.rs
@@ -1,4 +1,5 @@
#![doc = include_str!("../README.md")]
#![cfg_attr(not(feature = "std"), no_std)]
extern crate alloc;

pub mod base;
7 changes: 5 additions & 2 deletions crates/proof-of-sql/src/sql/parse/query_expr_tests.rs
@@ -2,7 +2,7 @@ use super::ConversionError;
use crate::{
base::{
database::{ColumnType, TableRef, TestSchemaAccessor},
map::{indexmap, IndexMap},
map::{indexmap, IndexMap, IndexSet},
},
sql::{
parse::QueryExpr,
@@ -1881,7 +1881,10 @@ fn select_group_and_order_by_preserve_the_column_order_reference() {
let (t, accessor) = get_test_accessor();
let base_cols: [&str; N] = ["i", "i0", "i1", "s"]; // sorted because of `select: [cols = ... ]`
let base_ordering = [Asc, Desc, Asc, Desc];
for (idx, perm_cols) in base_cols.into_iter().permutations(N).unique().enumerate() {
for (idx, perm_cols) in IndexSet::from_iter(base_cols.into_iter().permutations(N))
.into_iter()
.enumerate()
{
let perm_col_plans = perm_cols
.iter()
.sorted()
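Context for the change above: the workspace now builds itertools with only its `use_alloc` feature, so `Itertools::unique()` (which relies on std's hash collections) is no longer available; the test deduplicates with `IndexSet` instead, which keeps the first occurrence of each element in insertion order. A standalone sketch of the same behavior, assuming the crate's `map::IndexSet` wraps `indexmap::IndexSet` (shown here with indexmap's default std hasher for brevity):

```rust
use indexmap::IndexSet;

fn main() {
    let cols = ["i", "i0", "i1", "i", "s", "i0"];

    // IndexSet keeps only the first occurrence of each element and remembers
    // insertion order, so it can stand in for itertools' `unique()`.
    let deduped: Vec<&str> = IndexSet::<&str>::from_iter(cols).into_iter().collect();

    assert_eq!(deduped, ["i", "i0", "i1", "s"]);
}
```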
27 changes: 17 additions & 10 deletions
@@ -12,7 +12,6 @@ use proof_of_sql_parser::{
Identifier,
};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

/// A group by expression
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
@@ -158,7 +157,7 @@ impl GroupByPostprocessing {
)
})
.collect::<PostprocessingResult<Vec<AliasedResultExpr>>>()?;
let group_by_identifiers = by_ids.into_iter().unique().collect();
let group_by_identifiers = Vec::from_iter(IndexSet::from_iter(by_ids));
Ok(Self {
remainder_exprs,
group_by_identifiers,
@@ -190,14 +189,22 @@ impl<S: Scalar> PostprocessingStep<S> for GroupByPostprocessing {
fn apply(&self, owned_table: OwnedTable<S>) -> PostprocessingResult<OwnedTable<S>> {
// First evaluate all the aggregated columns
let alloc = Bump::new();
let evaluated_columns: HashMap<AggregationOperator, Vec<(Identifier, OwnedColumn<S>)>> =
self.aggregation_exprs
.iter()
.map(|(agg_op, expr, id)| -> PostprocessingResult<_> {
let evaluated_owned_column = owned_table.evaluate(expr)?;
Ok((*agg_op, (*id, evaluated_owned_column)))
})
.process_results(|iter| iter.into_group_map())?;
let evaluated_columns = self
.aggregation_exprs
.iter()
.map(|(agg_op, expr, id)| -> PostprocessingResult<_> {
let evaluated_owned_column = owned_table.evaluate(expr)?;
Ok((*agg_op, (*id, evaluated_owned_column)))
})
.process_results(|iter| {
iter.fold(
IndexMap::<_, Vec<_>>::default(),
|mut lookup, (key, val)| {
lookup.entry(key).or_default().push(val);
lookup
},
)
})?;
// Next actually do the GROUP BY
let group_by_ins = self
.group_by_identifiers
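The last hunk drops `std::collections::HashMap` and itertools' `into_group_map()` in favor of a fold into an `IndexMap`, which needs only `alloc` and keeps keys in first-seen order. A minimal standalone sketch of the same grouping pattern, assuming the crate's `map::IndexMap` wraps `indexmap::IndexMap` (keys and values here are illustrative):

```rust
use indexmap::IndexMap;

fn main() {
    // (aggregation op, (identifier, value)) pairs standing in for the evaluated columns.
    let pairs = [("sum", ("a", 1)), ("max", ("b", 2)), ("sum", ("c", 3))];

    // Group values by key while preserving first-seen key order, mirroring
    // itertools' `into_group_map()` without std's HashMap.
    let grouped = pairs.into_iter().fold(
        IndexMap::<&str, Vec<(&str, i32)>>::default(),
        |mut lookup, (key, val)| {
            lookup.entry(key).or_default().push(val);
            lookup
        },
    );

    assert_eq!(grouped["sum"], [("a", 1), ("c", 3)]);
    assert_eq!(grouped["max"], [("b", 2)]);
}
```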
