diff --git a/Cargo.toml b/Cargo.toml
index 574a93c8..0c038fae 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -36,7 +36,7 @@ ahash = "0.8.3"
 lazy_static = "1.4.0"
 comfy-table = "7.0.1"
 bytes = "1.5.0"
-kip_db = "0.1.2-alpha.19"
+kip_db = "0.1.2-alpha.20"
 rust_decimal = "1"
 csv = "1"
 regex = "1.10.2"
diff --git a/src/binder/aggregate.rs b/src/binder/aggregate.rs
index 2ac26f0f..159be108 100644
--- a/src/binder/aggregate.rs
+++ b/src/binder/aggregate.rs
@@ -77,7 +77,7 @@ impl<'a, T: Transaction> Binder<'a, T> {
             return_orderby.push(SortField::new(
                 expr,
                 asc.map_or(true, |asc| asc),
-                nulls_first.map_or(false, |first| first),
+                nulls_first.map_or(true, |first| first),
             ));
         }
         Some(return_orderby)
diff --git a/src/binder/alter_table.rs b/src/binder/alter_table.rs
index 8c4c1aaf..1e4b9948 100644
--- a/src/binder/alter_table.rs
+++ b/src/binder/alter_table.rs
@@ -2,7 +2,7 @@
 use sqlparser::ast::{AlterTableOperation, ObjectName};
 use std::sync::Arc;
 
-use super::Binder;
+use super::{is_valid_identifier, Binder};
 use crate::binder::{lower_case_name, split_name, BindError};
 use crate::planner::operator::alter_table::add_column::AddColumnOperator;
 use crate::planner::operator::alter_table::drop_column::DropColumnOperator;
@@ -17,7 +17,7 @@ impl<'a, T: Transaction> Binder<'a, T> {
         name: &ObjectName,
         operation: &AlterTableOperation,
     ) -> Result<LogicalPlan, BindError> {
-        let table_name: Arc<String> = Arc::new(split_name(&lower_case_name(name))?.1.to_string());
+        let table_name: Arc<String> = Arc::new(split_name(&lower_case_name(name))?.to_string());
 
         if let Some(table) = self.context.table(table_name.clone()) {
             let plan = match operation {
@@ -27,12 +27,18 @@ impl<'a, T: Transaction> Binder<'a, T> {
                     column_def,
                 } => {
                     let plan = ScanOperator::build(table_name.clone(), table);
+                    let column = self.bind_column(column_def)?;
+                    if !is_valid_identifier(column.name()) {
+                        return Err(BindError::InvalidColumn(
+                            "illegal column naming".to_string(),
+                        ));
+                    }
 
                     LogicalPlan {
                         operator: Operator::AddColumn(AddColumnOperator {
                             table_name,
                             if_not_exists: *if_not_exists,
-                            column: self.bind_column(column_def)?,
+                            column,
                         }),
                         childrens: vec![plan],
                     }
diff --git a/src/binder/create_table.rs b/src/binder/create_table.rs
index abe20525..e4a976c1 100644
--- a/src/binder/create_table.rs
+++ b/src/binder/create_table.rs
@@ -3,7 +3,7 @@ use sqlparser::ast::{ColumnDef, ColumnOption, ObjectName, TableConstraint};
 use std::collections::HashSet;
 use std::sync::Arc;
 
-use super::Binder;
+use super::{is_valid_identifier, Binder};
 use crate::binder::{lower_case_name, split_name, BindError};
 use crate::catalog::{ColumnCatalog, ColumnDesc};
 use crate::expression::ScalarExpression;
@@ -24,9 +24,12 @@ impl<'a, T: Transaction> Binder<'a, T> {
         if_not_exists: bool,
     ) -> Result<LogicalPlan, BindError> {
         let name = lower_case_name(name);
-        let (_, name) = split_name(&name)?;
+        let name = split_name(&name)?;
         let table_name = Arc::new(name.to_string());
 
+        if !is_valid_identifier(&table_name) {
+            return Err(BindError::InvalidTable("illegal table naming".to_string()));
+        }
         {
             // check duplicated column names
             let mut set = HashSet::new();
@@ -35,6 +38,11 @@ impl<'a, T: Transaction> Binder<'a, T> {
                 if !set.insert(col_name.clone()) {
                     return Err(BindError::AmbiguousColumn(col_name.to_string()));
                 }
+                if !is_valid_identifier(col_name) {
+                    return Err(BindError::InvalidColumn(
+                        "illegal column naming".to_string(),
+                    ));
+                }
             }
         }
         let mut columns: Vec<ColumnCatalog> = columns
diff --git a/src/binder/delete.rs b/src/binder/delete.rs
index e7148531..7657a77d 100644
--- a/src/binder/delete.rs
+++ b/src/binder/delete.rs
@@ -13,7 +13,7 @@ impl<'a, T: Transaction> Binder<'a, T> {
     ) -> Result<LogicalPlan, BindError> {
         if let TableFactor::Table { name, alias, .. } = &from.relation {
             let name = lower_case_name(name);
-            let (_, name) = split_name(&name)?;
+            let name = split_name(&name)?;
             let (table_name, mut plan) =
                 self._bind_single_table_ref(None, name, Self::trans_alias(alias))?;
 
diff --git a/src/binder/drop_table.rs b/src/binder/drop_table.rs
index e2296d2c..2eec4562 100644
--- a/src/binder/drop_table.rs
+++ b/src/binder/drop_table.rs
@@ -13,7 +13,7 @@ impl<'a, T: Transaction> Binder<'a, T> {
         if_exists: &bool,
     ) -> Result<LogicalPlan, BindError> {
         let name = lower_case_name(name);
-        let (_, name) = split_name(&name)?;
+        let name = split_name(&name)?;
         let table_name = Arc::new(name.to_string());
 
         let plan = LogicalPlan {
diff --git a/src/binder/insert.rs b/src/binder/insert.rs
index aea61f4d..2a4e6c36 100644
--- a/src/binder/insert.rs
+++ b/src/binder/insert.rs
@@ -21,7 +21,7 @@ impl<'a, T: Transaction> Binder<'a, T> {
         is_overwrite: bool,
     ) -> Result<LogicalPlan, BindError> {
         let name = lower_case_name(&name);
-        let (_, name) = split_name(&name)?;
+        let name = split_name(&name)?;
         let table_name = Arc::new(name.to_string());
 
         if let Some(table) = self.context.table(table_name.clone()) {
diff --git a/src/binder/mod.rs b/src/binder/mod.rs
index b56fc686..f2261527 100644
--- a/src/binder/mod.rs
+++ b/src/binder/mod.rs
@@ -15,7 +15,7 @@ mod update;
 use sqlparser::ast::{Ident, ObjectName, ObjectType, SetExpr, Statement};
 use std::collections::BTreeMap;
 
-use crate::catalog::{CatalogError, TableCatalog, TableName, DEFAULT_SCHEMA_NAME};
+use crate::catalog::{CatalogError, TableCatalog, TableName};
 use crate::expression::ScalarExpression;
 use crate::planner::operator::join::JoinType;
 use crate::planner::LogicalPlan;
@@ -199,11 +199,10 @@ fn lower_case_name(name: &ObjectName) -> ObjectName {
 }
 
 /// Split an object name into `(schema name, table name)`.
-fn split_name(name: &ObjectName) -> Result<(&str, &str), BindError> {
+fn split_name(name: &ObjectName) -> Result<&str, BindError> {
     Ok(match name.0.as_slice() {
-        [table] => (DEFAULT_SCHEMA_NAME, &table.value),
-        [schema, table] => (&schema.value, &table.value),
-        _ => return Err(BindError::InvalidTableName(name.0.clone())),
+        [table] => &table.value,
+        _ => return Err(BindError::InvalidTable(name.to_string())),
     })
 }
 
@@ -213,8 +212,6 @@ pub enum BindError {
     UnsupportedStmt(String),
     #[error("invalid table {0}")]
     InvalidTable(String),
-    #[error("invalid table name: {0:?}")]
-    InvalidTableName(Vec<Ident>),
     #[error("invalid column {0}")]
     InvalidColumn(String),
     #[error("ambiguous column {0}")]
@@ -237,9 +234,15 @@ pub enum BindError {
     UnsupportedCopySource(String),
 }
 
+pub(crate) fn is_valid_identifier(s: &str) -> bool {
+    s.chars().all(|c| c.is_alphanumeric() || c == '_')
+        && !s.chars().next().unwrap_or_default().is_numeric()
+        && !s.chars().all(|c| c == '_')
+}
+
 #[cfg(test)]
 pub mod test {
-    use crate::binder::{Binder, BinderContext};
+    use crate::binder::{is_valid_identifier, Binder, BinderContext};
     use crate::catalog::{ColumnCatalog, ColumnDesc};
     use crate::execution::ExecutorError;
     use crate::planner::LogicalPlan;
@@ -308,4 +311,16 @@ pub mod test {
 
         Ok(binder.bind(&stmt[0])?)
     }
+
+    #[test]
+    pub fn test_valid_identifier() {
+        assert!(is_valid_identifier("valid_table"));
+        assert!(is_valid_identifier("valid_column"));
+        assert!(is_valid_identifier("_valid_column"));
+        assert!(is_valid_identifier("valid_column_1"));
+
+        assert!(!is_valid_identifier("invalid_name&"));
+        assert!(!is_valid_identifier("1_invalid_name"));
+        assert!(!is_valid_identifier("____"));
+    }
 }
diff --git a/src/binder/select.rs b/src/binder/select.rs
index 9668da49..2a8324ca 100644
--- a/src/binder/select.rs
+++ b/src/binder/select.rs
@@ -16,9 +16,7 @@ use crate::{
 
 use super::Binder;
 use crate::binder::BindError;
-use crate::catalog::{
-    ColumnCatalog, TableCatalog, TableName, DEFAULT_DATABASE_NAME, DEFAULT_SCHEMA_NAME,
-};
+use crate::catalog::{ColumnCatalog, TableCatalog, TableName};
 use crate::execution::executor::dql::join::joins_nullable;
 use crate::expression::BinaryOperator;
 use crate::planner::operator::join::JoinCondition;
@@ -155,11 +153,9 @@ impl<'a, T: Transaction> Binder<'a, T> {
             .map(|ident| Ident::new(ident.value.to_lowercase()))
             .collect_vec();
 
-        let (_database, _schema, table): (&str, &str, &str) = match obj_name.as_slice() {
-            [table] => (DEFAULT_DATABASE_NAME, DEFAULT_SCHEMA_NAME, &table.value),
-            [schema, table] => (DEFAULT_DATABASE_NAME, &schema.value, &table.value),
-            [database, schema, table] => (&database.value, &schema.value, &table.value),
-            _ => return Err(BindError::InvalidTableName(obj_name)),
+        let table: &str = match obj_name.as_slice() {
+            [table] => &table.value,
+            _ => return Err(BindError::InvalidTable(obj_name.iter().join(","))),
         };
 
         let (table, plan) =
diff --git a/src/binder/truncate.rs b/src/binder/truncate.rs
index 17ce7818..4dfb920c 100644
--- a/src/binder/truncate.rs
+++ b/src/binder/truncate.rs
@@ -9,7 +9,7 @@ impl<'a, T: Transaction> Binder<'a, T> {
 
     pub(crate) fn bind_truncate(&mut self, name: &ObjectName) -> Result<LogicalPlan, BindError> {
         let name = lower_case_name(name);
-        let (_, name) = split_name(&name)?;
+        let name = split_name(&name)?;
         let table_name = Arc::new(name.to_string());
 
         let plan = LogicalPlan {
diff --git a/src/binder/update.rs b/src/binder/update.rs
index 784e7369..9703cf43 100644
--- a/src/binder/update.rs
+++ b/src/binder/update.rs
@@ -18,7 +18,7 @@ impl<'a, T: Transaction> Binder<'a, T> {
     ) -> Result<LogicalPlan, BindError> {
         if let TableFactor::Table { name, .. } = &to.relation {
             let name = lower_case_name(name);
-            let (_, name) = split_name(&name)?;
+            let name = split_name(&name)?;
             let table_name = Arc::new(name.to_string());
 
             let mut plan = self.bind_table_ref(slice::from_ref(to))?;
diff --git a/src/catalog/mod.rs b/src/catalog/mod.rs
index b0d077f0..7060a59d 100644
--- a/src/catalog/mod.rs
+++ b/src/catalog/mod.rs
@@ -1,18 +1,9 @@
 // Module: catalog
-use std::sync::Arc;
 
 pub(crate) use self::column::*;
-pub(crate) use self::root::*;
 pub(crate) use self::table::*;
 
-/// The type of catalog reference.
-pub type RootRef = Arc<RootCatalog>;
-
-pub(crate) static DEFAULT_DATABASE_NAME: &str = "kipsql";
-pub(crate) static DEFAULT_SCHEMA_NAME: &str = "kipsql";
-
 mod column;
-mod root;
 mod table;
 
 #[derive(thiserror::Error, Debug)]
diff --git a/src/catalog/root.rs b/src/catalog/root.rs
deleted file mode 100644
index d96f782d..00000000
--- a/src/catalog/root.rs
+++ /dev/null
@@ -1,86 +0,0 @@
-use std::collections::BTreeMap;
-
-use crate::catalog::{CatalogError, ColumnCatalog, TableCatalog, TableName};
-
-#[derive(Debug, Clone)]
-pub struct RootCatalog {
-    table_idxs: BTreeMap<TableName, TableCatalog>,
-}
-
-impl Default for RootCatalog {
-    fn default() -> Self {
-        Self::new()
-    }
-}
-
-impl RootCatalog {
-    #[allow(dead_code)]
-    pub fn new() -> Self {
-        RootCatalog {
-            table_idxs: Default::default(),
-        }
-    }
-
-    pub(crate) fn get_table(&self, name: &String) -> Option<&TableCatalog> {
-        self.table_idxs.get(name)
-    }
-
-    pub(crate) fn add_table(
-        &mut self,
-        table_name: TableName,
-        columns: Vec<ColumnCatalog>,
-    ) -> Result<TableName, CatalogError> {
-        if self.table_idxs.contains_key(&table_name) {
-            return Err(CatalogError::Duplicated("column", table_name.to_string()));
-        }
-        let table = TableCatalog::new(table_name.clone(), columns)?;
-
-        self.table_idxs.insert(table_name.clone(), table);
-
-        Ok(table_name)
-    }
-
-    pub(crate) fn drop_table(&mut self, table_name: &String) -> Result<(), CatalogError> {
-        self.table_idxs
-            .retain(|name, _| name.as_str() != table_name);
-
-        Ok(())
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::catalog::{ColumnCatalog, ColumnDesc};
-    use crate::types::LogicalType;
-    use std::sync::Arc;
-
-    #[test]
-    fn test_root_catalog() {
-        let mut root_catalog = RootCatalog::new();
-
-        let col0 = ColumnCatalog::new(
-            "a".to_string(),
-            false,
-            ColumnDesc::new(LogicalType::Integer, false, false, None),
-            None,
-        );
-        let col1 = ColumnCatalog::new(
-            "b".to_string(),
-            false,
-            ColumnDesc::new(LogicalType::Boolean, false, false, None),
-            None,
-        );
-        let col_catalogs = vec![col0, col1];
-
-        let table_id_1 = root_catalog
-            .add_table(Arc::new("test_table_1".to_string()), col_catalogs.clone())
-            .unwrap();
-
-        let table_id_2 = root_catalog
-            .add_table(Arc::new("test_table_2".to_string()), col_catalogs)
-            .unwrap();
-
-        assert_ne!(table_id_1, table_id_2);
-    }
-}
diff --git a/src/execution/executor/dql/sort.rs b/src/execution/executor/dql/sort.rs
index 25e3c503..ad64503e 100644
--- a/src/execution/executor/dql/sort.rs
+++ b/src/execution/executor/dql/sort.rs
@@ -2,10 +2,41 @@ use crate::execution::executor::{BoxedExecutor, Executor};
 use crate::execution::ExecutorError;
 use crate::planner::operator::sort::{SortField, SortOperator};
 use crate::storage::Transaction;
+use crate::types::errors::TypeError;
 use crate::types::tuple::Tuple;
 use futures_async_stream::try_stream;
+use itertools::Itertools;
 use std::cell::RefCell;
-use std::cmp::Ordering;
+use std::mem;
+
+const BUCKET_SIZE: usize = u8::MAX as usize + 1;
+
+// LSD Radix Sort
+fn radix_sort<T>(mut tuples: Vec<(T, Vec<u8>)>) -> Vec<T> {
+    if let Some(max_len) = tuples.iter().map(|(_, bytes)| bytes.len()).max() {
+        // init buckets
+        let mut temp_buckets = Vec::with_capacity(BUCKET_SIZE);
+        for _ in 0..BUCKET_SIZE {
+            temp_buckets.push(Vec::new());
+        }
+
+        for i in (0..max_len).rev() {
+            for (t, bytes) in tuples {
+                let index = if bytes.len() > i { bytes[i] } else { 0 };
+
+                temp_buckets[index as usize].push((t, bytes));
+            }
+
+            tuples = temp_buckets
+                .iter_mut()
+                .map(|group| mem::replace(group, vec![]))
+                .flatten()
+                .collect_vec();
+        }
+        return tuples.into_iter().map(|(tuple, _)| tuple).collect_vec();
+    }
+    Vec::new()
+}
 
 pub struct Sort {
     sort_fields: Vec<SortField>,
@@ -43,51 +74,33 @@ impl Sort {
         for tuple in input {
             tuples.push(tuple?);
         }
+        let tuples_with_keys: Vec<(Tuple, Vec<u8>)> = tuples
+            .into_iter()
+            .map(|tuple| {
+                let mut full_key = Vec::new();
 
-        tuples.sort_by(|tuple_1, tuple_2| {
-            let mut ordering = Ordering::Equal;
-
-            for SortField {
-                expr,
-                asc,
-                nulls_first,
-            } in &sort_fields
-            {
-                let value_1 = expr.eval(tuple_1).unwrap();
-                let value_2 = expr.eval(tuple_2).unwrap();
-
-                ordering = value_1.partial_cmp(&value_2).unwrap_or_else(|| {
-                    match (value_1.is_null(), value_2.is_null()) {
-                        (false, true) => {
-                            if *nulls_first {
-                                Ordering::Less
-                            } else {
-                                Ordering::Greater
-                            }
-                        }
-                        (true, false) => {
-                            if *nulls_first {
-                                Ordering::Greater
-                            } else {
-                                Ordering::Less
-                            }
-                        }
-                        _ => Ordering::Equal,
-                    }
-                });
+                for SortField {
+                    expr,
+                    nulls_first,
+                    asc,
+                } in &sort_fields
+                {
+                    let mut key = Vec::new();
 
-                if !*asc {
-                    ordering = ordering.reverse();
-                }
+                    expr.eval(&tuple)?.memcomparable_encode(&mut key)?;
+                    key.push(if *nulls_first { u8::MIN } else { u8::MAX });
 
-                if ordering != Ordering::Equal {
-                    break;
+                    if !asc {
+                        for byte in key.iter_mut() {
+                            *byte ^= 0xFF;
+                        }
+                    }
+                    full_key.extend(key);
                 }
-            }
-
-            ordering
-        });
-
+                Ok::<(Tuple, Vec<u8>), TypeError>((tuple, full_key))
+            })
+            .try_collect()?;
+        let mut tuples = radix_sort(tuples_with_keys);
         let len = limit.unwrap_or(tuples.len());
 
         for tuple in tuples.drain(..len) {
@@ -95,3 +108,15 @@ impl Sort {
         }
     }
 }
+
+#[test]
+fn test_sort() {
+    let tuples = vec![
+        (0, "abc".as_bytes().to_vec()),
+        (1, "abz".as_bytes().to_vec()),
+        (2, "abe".as_bytes().to_vec()),
+        (3, "abcd".as_bytes().to_vec()),
+    ];
+
+    assert_eq!(radix_sort(tuples), vec![0, 3, 2, 1])
+}
diff --git a/src/execution/mod.rs b/src/execution/mod.rs
index 77764311..9f1caaed 100644
--- a/src/execution/mod.rs
+++ b/src/execution/mod.rs
@@ -20,7 +20,7 @@ pub enum ExecutorError {
         #[from]
         TypeError,
     ),
-    #[error("storage_ap error: {0}")]
+    #[error("storage error: {0}")]
     StorageError(
         #[source]
         #[from]
diff --git a/src/storage/table_codec.rs b/src/storage/table_codec.rs
index 7ac851aa..d9f415e1 100644
--- a/src/storage/table_codec.rs
+++ b/src/storage/table_codec.rs
@@ -2,11 +2,13 @@ use crate::catalog::{ColumnCatalog, ColumnRef};
 use crate::types::errors::TypeError;
 use crate::types::index::{Index, IndexId, IndexMeta};
 use crate::types::tuple::{Tuple, TupleId};
+use crate::types::LogicalType;
 use bytes::Bytes;
 use lazy_static::lazy_static;
 
 const BOUND_MIN_TAG: u8 = 0;
 const BOUND_MAX_TAG: u8 = 1;
+
 lazy_static! {
     static ref ROOT_BYTES: Vec<u8> = b"Root".to_vec();
 }
@@ -136,7 +138,21 @@ impl TableCodec {
         let mut key_prefix = Self::key_prefix(CodecType::Tuple, table_name);
         key_prefix.push(BOUND_MIN_TAG);
 
-        tuple_id.to_primary_key(&mut key_prefix)?;
+        if !matches!(
+            tuple_id.logical_type(),
+            LogicalType::Tinyint
+                | LogicalType::Smallint
+                | LogicalType::Integer
+                | LogicalType::Bigint
+                | LogicalType::UTinyint
+                | LogicalType::USmallint
+                | LogicalType::UInteger
+                | LogicalType::UBigint
+                | LogicalType::Varchar(_)
+        ) {
+            return Err(TypeError::InvalidType);
+        }
+        tuple_id.memcomparable_encode(&mut key_prefix)?;
 
         Ok(key_prefix)
     }
@@ -195,7 +211,8 @@ impl TableCodec {
         key_prefix.push(BOUND_MIN_TAG);
 
         for col_v in &index.column_values {
-            col_v.to_index_key(&mut key_prefix)?;
+            col_v.memcomparable_encode(&mut key_prefix)?;
+            key_prefix.push(BOUND_MIN_TAG);
         }
 
         Ok(key_prefix)
diff --git a/src/types/value.rs b/src/types/value.rs
index 49e99207..97d5e4e9 100644
--- a/src/types/value.rs
+++ b/src/types/value.rs
@@ -500,30 +500,7 @@ impl DataValue {
         }
     }
 
-    pub fn to_primary_key(&self, b: &mut Vec<u8>) -> Result<(), TypeError> {
-        match self {
-            DataValue::Int8(Some(v)) => encode_u!(b, *v as u8 ^ 0x80_u8),
-            DataValue::Int16(Some(v)) => encode_u!(b, *v as u16 ^ 0x8000_u16),
-            DataValue::Int32(Some(v)) => encode_u!(b, *v as u32 ^ 0x80000000_u32),
-            DataValue::Int64(Some(v)) => encode_u!(b, *v as u64 ^ 0x8000000000000000_u64),
-            DataValue::UInt8(Some(v)) => encode_u!(b, v),
-            DataValue::UInt16(Some(v)) => encode_u!(b, v),
-            DataValue::UInt32(Some(v)) => encode_u!(b, v),
-            DataValue::UInt64(Some(v)) => encode_u!(b, v),
-            DataValue::Utf8(Some(v)) => Self::encode_bytes(b, v.as_bytes()),
-            value => {
-                return if value.is_null() {
-                    Err(TypeError::PrimaryKeyNotFound)
-                } else {
-                    Err(TypeError::InvalidType)
-                }
-            }
-        }
-
-        Ok(())
-    }
-
-    pub fn to_index_key(&self, b: &mut Vec<u8>) -> Result<(), TypeError> {
+    pub fn memcomparable_encode(&self, b: &mut Vec<u8>) -> Result<(), TypeError> {
         match self {
             DataValue::Int8(Some(v)) => encode_u!(b, *v as u8 ^ 0x80_u8),
             DataValue::Int16(Some(v)) => encode_u!(b, *v as u16 ^ 0x8000_u16),
@@ -561,12 +538,11 @@ impl DataValue {
                 encode_u!(b, u);
             }
+            DataValue::Null => (),
             DataValue::Decimal(Some(_v)) => todo!(),
             value => {
-                return if value.is_null() {
-                    todo!()
-                } else {
-                    Err(TypeError::InvalidType)
+                if !value.is_null() {
+                    return Err(TypeError::InvalidType);
                 }
             }
         }
 
@@ -1059,14 +1035,14 @@ mod test {
     use crate::types::value::DataValue;
 
     #[test]
-    fn test_to_primary_key() -> Result<(), TypeError> {
+    fn test_mem_comparable_int() -> Result<(), TypeError> {
         let mut key_i8_1 = Vec::new();
         let mut key_i8_2 = Vec::new();
         let mut key_i8_3 = Vec::new();
 
-        DataValue::Int8(Some(i8::MIN)).to_primary_key(&mut key_i8_1)?;
-        DataValue::Int8(Some(-1_i8)).to_primary_key(&mut key_i8_2)?;
-        DataValue::Int8(Some(i8::MAX)).to_primary_key(&mut key_i8_3)?;
+        DataValue::Int8(Some(i8::MIN)).memcomparable_encode(&mut key_i8_1)?;
+        DataValue::Int8(Some(-1_i8)).memcomparable_encode(&mut key_i8_2)?;
+        DataValue::Int8(Some(i8::MAX)).memcomparable_encode(&mut key_i8_3)?;
 
         println!("{:?} < {:?}", key_i8_1, key_i8_2);
         println!("{:?} < {:?}", key_i8_2, key_i8_3);
@@ -1077,9 +1053,9 @@ mod test {
         let mut key_i16_2 = Vec::new();
         let mut key_i16_3 = Vec::new();
 
-        DataValue::Int16(Some(i16::MIN)).to_primary_key(&mut key_i16_1)?;
-        DataValue::Int16(Some(-1_i16)).to_primary_key(&mut key_i16_2)?;
-        DataValue::Int16(Some(i16::MAX)).to_primary_key(&mut key_i16_3)?;
+        DataValue::Int16(Some(i16::MIN)).memcomparable_encode(&mut key_i16_1)?;
+        DataValue::Int16(Some(-1_i16)).memcomparable_encode(&mut key_i16_2)?;
+        DataValue::Int16(Some(i16::MAX)).memcomparable_encode(&mut key_i16_3)?;
 
         println!("{:?} < {:?}", key_i16_1, key_i16_2);
         println!("{:?} < {:?}", key_i16_2, key_i16_3);
@@ -1090,9 +1066,9 @@ mod test {
         let mut key_i32_2 = Vec::new();
         let mut key_i32_3 = Vec::new();
 
-        DataValue::Int32(Some(i32::MIN)).to_primary_key(&mut key_i32_1)?;
-        DataValue::Int32(Some(-1_i32)).to_primary_key(&mut key_i32_2)?;
-        DataValue::Int32(Some(i32::MAX)).to_primary_key(&mut key_i32_3)?;
+        DataValue::Int32(Some(i32::MIN)).memcomparable_encode(&mut key_i32_1)?;
+        DataValue::Int32(Some(-1_i32)).memcomparable_encode(&mut key_i32_2)?;
+        DataValue::Int32(Some(i32::MAX)).memcomparable_encode(&mut key_i32_3)?;
 
         println!("{:?} < {:?}", key_i32_1, key_i32_2);
         println!("{:?} < {:?}", key_i32_2, key_i32_3);
@@ -1103,9 +1079,9 @@ mod test {
         let mut key_i64_2 = Vec::new();
         let mut key_i64_3 = Vec::new();
 
-        DataValue::Int64(Some(i64::MIN)).to_primary_key(&mut key_i64_1)?;
-        DataValue::Int64(Some(-1_i64)).to_primary_key(&mut key_i64_2)?;
-        DataValue::Int64(Some(i64::MAX)).to_primary_key(&mut key_i64_3)?;
+        DataValue::Int64(Some(i64::MIN)).memcomparable_encode(&mut key_i64_1)?;
+        DataValue::Int64(Some(-1_i64)).memcomparable_encode(&mut key_i64_2)?;
+        DataValue::Int64(Some(i64::MAX)).memcomparable_encode(&mut key_i64_3)?;
 
         println!("{:?} < {:?}", key_i64_1, key_i64_2);
         println!("{:?} < {:?}", key_i64_2, key_i64_3);
@@ -1116,14 +1092,14 @@ mod test {
     }
 
     #[test]
-    fn test_to_index_key_f() -> Result<(), TypeError> {
+    fn test_mem_comparable_float() -> Result<(), TypeError> {
         let mut key_f32_1 = Vec::new();
         let mut key_f32_2 = Vec::new();
         let mut key_f32_3 = Vec::new();
 
-        DataValue::Float32(Some(f32::MIN)).to_index_key(&mut key_f32_1)?;
-        DataValue::Float32(Some(-1_f32)).to_index_key(&mut key_f32_2)?;
-        DataValue::Float32(Some(f32::MAX)).to_index_key(&mut key_f32_3)?;
+        DataValue::Float32(Some(f32::MIN)).memcomparable_encode(&mut key_f32_1)?;
+        DataValue::Float32(Some(-1_f32)).memcomparable_encode(&mut key_f32_2)?;
+        DataValue::Float32(Some(f32::MAX)).memcomparable_encode(&mut key_f32_3)?;
 
         println!("{:?} < {:?}", key_f32_1, key_f32_2);
         println!("{:?} < {:?}", key_f32_2, key_f32_3);
@@ -1134,9 +1110,9 @@ mod test {
         let mut key_f64_2 = Vec::new();
         let mut key_f64_3 = Vec::new();
 
-        DataValue::Float64(Some(f64::MIN)).to_index_key(&mut key_f64_1)?;
-        DataValue::Float64(Some(-1_f64)).to_index_key(&mut key_f64_2)?;
-        DataValue::Float64(Some(f64::MAX)).to_index_key(&mut key_f64_3)?;
+        DataValue::Float64(Some(f64::MIN)).memcomparable_encode(&mut key_f64_1)?;
+        DataValue::Float64(Some(-1_f64)).memcomparable_encode(&mut key_f64_2)?;
+        DataValue::Float64(Some(f64::MAX)).memcomparable_encode(&mut key_f64_3)?;
 
         println!("{:?} < {:?}", key_f64_1, key_f64_2);
         println!("{:?} < {:?}", key_f64_2, key_f64_3);
diff --git a/tests/slt/create.slt b/tests/slt/create.slt
index 22707f63..bf017236 100644
--- a/tests/slt/create.slt
+++ b/tests/slt/create.slt
@@ -4,6 +4,18 @@ create table t(id int primary key, v1 int, v2 int, v3 int)
 statement error
 create table t(id int primary key, v1 int, v2 int, v3 int)
 
+statement error
+create table invalid_name&(id int primary key, v1 int)
+
+statement error
+create table 1_invalid_name(id int primary key, v1 int)
+
+statement error
+create table ____(id int primary key, v1 int)
+
+statement error
+create table invalid_column(id int primary key, v1 int, v& int)
+
 statement ok
 create table if not exists t(id int primary key, v1 int, v2 int, v3 int)
 
diff --git a/tests/slt/distinct.slt b/tests/slt/distinct.slt
index 9a33bac5..5bb4c47d 100644
--- a/tests/slt/distinct.slt
+++ b/tests/slt/distinct.slt
@@ -4,6 +4,13 @@ CREATE TABLE test (id int primary key, x int, y int);
 statement ok
 INSERT INTO test VALUES (0, 1, 1), (1, 2, 2), (2, 1, 1), (3, 3, 3);
 
+query II rowsort
+SELECT DISTINCT x FROM test;
+----
+1
+2
+3
+
 query II
 SELECT DISTINCT x FROM test ORDER BY x, id;
 ----
@@ -11,13 +18,11 @@ SELECT DISTINCT x FROM test ORDER BY x, id;
 2
 3
 
-
 query I
 SELECT DISTINCT sum(x) FROM test ORDER BY sum(x);
 ----
 7
 
-
 # ORDER BY items must appear in the select list
 # if SELECT DISTINCT is specified
 statement error
diff --git a/tests/slt/insert.slt b/tests/slt/insert.slt
index 5abf9de2..28e8d9f9 100644
--- a/tests/slt/insert.slt
+++ b/tests/slt/insert.slt
@@ -8,7 +8,7 @@ statement ok
 insert into t values (1,1,10,100), (2,2,20,200), (3,3,30,300), (4,4,40,400)
 
 statement ok
-insert into t(id, v1, v2, v3) values (5, 1,10,100)
+insert into t(id, v1, v2, v3) values (5,1,10,100)
 
 statement ok
 insert into t(id, v1, v2) values (6,1,10)
@@ -16,6 +16,15 @@ insert into t(id, v1, v2) values (6,1,10)
 statement ok
 insert into t(id, v2, v1) values (7,1,10)
 
+statement error
+insert into t(id, v1, v2, v3) values (0)
+
+statement error
+insert into t(id, v1, v2, v3) values (0, 0)
+
+statement error
+insert into t(id, v1, v2, v3) values (0, 0, 0)
+
 statement ok
 insert into t values (8,NULL,NULL,NULL)
 
diff --git a/tests/slt/order_by.slt b/tests/slt/order_by.slt
index 9f84275b..e8cc4599 100644
--- a/tests/slt/order_by.slt
+++ b/tests/slt/order_by.slt
@@ -50,16 +50,26 @@ statement ok
 create table t(id int primary key, v1 int null, v2 int null)
 
 statement ok
-insert into t values (0, 1, 0), (1, 2, 2), (2, null, 5), (3, 2, null)
+insert into t values (0, 1, 0), (1, 2, 2), (2, null, 5), (3, 2, null), (4, null, null)
 
 query II
 select v1, v2 from t order by v1 asc, v2 asc
 ----
+null null
 null 5
 1 0
 2 null
 2 2
 
+query II
+select v1, v2 from t order by v1 asc nulls last, v2 asc
+----
+1 0
+2 null
+2 2
+null null
+null 5
+
 statement ok
 drop table t
 
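Appendix (not part of the patch): a minimal, self-contained Rust sketch of the ordering property the new memcomparable_encode / radix_sort path relies on. The helper name encode_i32 is illustrative only and does not exist in the repository; the real encoding lives in DataValue::memcomparable_encode. The idea: flipping the sign bit makes signed integers compare correctly as raw big-endian bytes, and XOR-ing every key byte with 0xFF reverses that order for descending sort fields.

// Standalone illustration; `encode_i32` is a made-up helper, not repository code.
fn encode_i32(v: i32, asc: bool, key: &mut Vec<u8>) {
    // Sign-bit flip: maps i32::MIN..=i32::MAX onto 0..=u32::MAX while
    // preserving order, so big-endian bytes sort like the signed values.
    let mut bytes = (v as u32 ^ 0x8000_0000).to_be_bytes();
    if !asc {
        // Byte-wise inversion reverses the order for DESC sort fields.
        for b in bytes.iter_mut() {
            *b ^= 0xFF;
        }
    }
    key.extend_from_slice(&bytes);
}

fn main() {
    let mut rows = vec![3_i32, -1, 42, i32::MIN, 0];
    // Sorting by the encoded key alone (as the patch's radix sort does)
    // yields the same order as sorting by the values themselves.
    rows.sort_by_key(|v| {
        let mut key = Vec::new();
        encode_i32(*v, true, &mut key);
        key
    });
    assert_eq!(rows, vec![i32::MIN, -1, 0, 3, 42]);
}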