refactor(core): yet another io refactor
I think we got it this time, though.
gadomski committed Sep 17, 2024
1 parent a3284f5 commit 7fa4365
Showing 31 changed files with 1,726 additions and 893 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/core.yml
@@ -48,7 +48,7 @@ jobs:
- name: Check w/ reqwest
run: cargo check -F reqwest
- name: Test
- run: cargo test -F geo -F geoparquet-compression -F reqwest -F object-store-full
+ run: cargo test -F geo -F geoparquet-compression -F reqwest -F object-store-all
test-core-with-gdal:
runs-on: ubuntu-latest
steps:
2 changes: 1 addition & 1 deletion cli/Cargo.toml
@@ -31,7 +31,7 @@ serde = "1"
serde_json = "1"
stac = { version = "0.9.0", path = "../core", features = [
"reqwest",
"object-store-full",
"object-store-all",
] }
stac-api = { version = "0.5.0", path = "../api", features = ["client"] }
stac-duckdb = { version = "0.0.1", path = "../duckdb", optional = true }
2 changes: 1 addition & 1 deletion cli/src/args/mod.rs
@@ -12,7 +12,7 @@ mod validate;

use crate::{input::Input, options::KeyValue, output::Output, Result, Value};
use clap::Parser;
- use stac::io::Format;
+ use stac::Format;
use tokio::{sync::mpsc::Sender, task::JoinHandle};
use tracing::metadata::Level;

10 changes: 5 additions & 5 deletions cli/src/args/search.rs
@@ -132,7 +132,7 @@ async fn search_api(
}
}

- #[cfg(all(feature = "duckdb", feature = "geoparquet"))]
+ #[cfg(feature = "duckdb")]
async fn search_geoparquet(
href: String,
mut search: Search,
@@ -185,20 +185,20 @@ impl Run for Args {
} else {
Some(self.collections)
};
- #[cfg(all(feature = "duckdb", feature = "geoparquet"))]
+ #[cfg(feature = "duckdb")]
{
if self.duckdb.unwrap_or_else(|| {
matches!(
- stac::io::Format::infer_from_href(&self.href),
- Some(stac::io::Format::Geoparquet(_))
+ stac::Format::infer_from_href(&self.href),
+ Some(stac::Format::Geoparquet(_))
)
}) {
search_geoparquet(self.href, search, stream, self.max_items).await
} else {
search_api(self.href, search, stream, self.max_items).await
}
}
- #[cfg(any(not(feature = "duckdb"), not(feature = "geoparquet")))]
+ #[cfg(not(feature = "duckdb"))]
{
search_api(self.href, search, stream, self.max_items).await
}
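
The dispatch above hinges on `Format::infer_from_href`. A standalone sketch of that check (only `stac::Format::infer_from_href` and the `Format::Geoparquet` variant are taken from this diff; the href is made up):

```rust
use stac::Format;

fn main() {
    // Pick the DuckDB-backed search path only when the href looks like
    // stac-geoparquet, mirroring the check in `Run for Args` above.
    let href = "s3://bucket/items.parquet"; // illustrative href
    let use_duckdb = matches!(
        Format::infer_from_href(href),
        Some(Format::Geoparquet(_))
    );
    println!("use duckdb for {href}: {use_duckdb}");
}
```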
9 changes: 6 additions & 3 deletions cli/src/input.rs
@@ -1,7 +1,7 @@
use std::io::Read;

use crate::{options::Options, Error, Result};
- use stac::{io::Format, Value};
+ use stac::{Format, Value};

/// The input to a CLI run.
#[derive(Debug, Default)]
@@ -40,15 +40,18 @@ impl Input {
/// Gets a STAC value from the input.
pub(crate) async fn get(&self) -> Result<Value> {
if let Some(href) = self.href.as_deref() {
- stac::io::get_format_opts(href, self.format, self.options.iter())
+ self.format
+ .or_else(|| Format::infer_from_href(href))
+ .unwrap_or_default()
+ .get_opts(href, self.options.iter())
.await
.map_err(Error::from)
} else {
let mut buf = Vec::new();
let _ = std::io::stdin().read_to_end(&mut buf);
self.format
.unwrap_or_default()
- .from_bytes(buf.into())
+ .from_bytes(buf)
.map_err(Error::from)
}
}
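
The stdin branch can be exercised on its own; a minimal sketch, assuming the illustrative catalog JSON below (only `Format`'s `Default` impl, implied by `unwrap_or_default` above, and `from_bytes` come from this diff):

```rust
use stac::Format;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Parse a raw byte buffer with the default format, as the stdin branch of
    // `Input::get` does above. The catalog JSON here is illustrative.
    let buf: Vec<u8> = br#"{"type":"Catalog","stac_version":"1.0.0","id":"example","description":"An example catalog","links":[]}"#.to_vec();
    let value: stac::Value = Format::default().from_bytes(buf)?;
    println!("{value:?}");
    Ok(())
}
```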
9 changes: 5 additions & 4 deletions cli/src/output.rs
@@ -2,7 +2,7 @@

use crate::{options::Options, value::Value, Error, Result};
use object_store::PutResult;
- use stac::io::{Format, IntoFormattedBytes};
+ use stac::{Format, ToNdjson};
use std::{path::Path, pin::Pin};
use tokio::{
fs::File,
Expand Down Expand Up @@ -57,7 +57,7 @@ impl Output {

/// Streams a value to the output
pub(crate) async fn stream(&mut self, value: Value) -> Result<()> {
- let bytes = value.into_formatted_bytes(Format::NdJson)?;
+ let bytes = value.to_ndjson_vec()?;
self.stream.write_all(&bytes).await?;
self.stream.flush().await?;
Ok(())
@@ -66,11 +66,12 @@
/// Puts a value to the output.
pub(crate) async fn put(&mut self, value: Value) -> Result<Option<PutResult>> {
if let Some(href) = self.href.as_deref() {
- stac::io::put_format_opts(href, value, self.format, self.options.iter())
+ self.format
+ .put_opts(href, value, self.options.iter())
.await
.map_err(Error::from)
} else {
- let bytes = value.into_formatted_bytes(self.format)?;
+ let bytes = self.format.into_vec(value)?;
self.stream.write_all(&bytes).await?;
self.stream.flush().await?;
Ok(None)
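
The `ToNdjson` half of the new conversion traits is easy to sketch in isolation. This assumes `ToNdjson` is implemented for `stac::Value`, as the `Value::Stac` arm in `cli/src/value.rs` below relies on, and that `stac::Value` has an `Item` tuple variant; the item id is illustrative:

```rust
use stac::{Item, ToNdjson};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Serialize a STAC value as newline-delimited JSON, the same trait the
    // streaming branch of `Output` uses above.
    let value = stac::Value::Item(Item::new("example-item"));

    // Either grab the bytes...
    let bytes = value.to_ndjson_vec()?;
    println!("{} ndjson bytes", bytes.len());

    // ...or write straight to anything that implements std::io::Write.
    value.to_ndjson_writer(std::io::stdout())?;
    Ok(())
}
```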
30 changes: 25 additions & 5 deletions cli/src/value.rs
@@ -1,6 +1,6 @@
use crate::{Error, Result};
use serde::Serialize;
- use stac::io::{Format, IntoFormattedBytes};
+ use stac::{IntoGeoparquet, ToNdjson};

/// An output value, which can either be a [serde_json::Value] or a [stac::Value].
#[derive(Debug, Serialize)]
@@ -41,11 +41,31 @@ impl TryFrom<Value> for stac::Value {
}
}

- impl IntoFormattedBytes for Value {
- fn into_formatted_bytes(self, format: Format) -> stac::Result<Vec<u8>> {
+ impl ToNdjson for Value {
+ fn to_ndjson_vec(&self) -> stac::Result<Vec<u8>> {
match self {
- Self::Json(value) => value.into_formatted_bytes(format),
- Self::Stac(value) => value.into_formatted_bytes(format),
+ Value::Json(json) => json.to_ndjson_vec(),
+ Value::Stac(stac) => stac.to_ndjson_vec(),
}
}

fn to_ndjson_writer(&self, writer: impl std::io::Write) -> stac::Result<()> {
match self {
Value::Json(json) => json.to_ndjson_writer(writer),
Value::Stac(stac) => stac.to_ndjson_writer(writer),
}
}
}

impl IntoGeoparquet for Value {
fn into_geoparquet_writer(
self,
writer: impl std::io::Write + Send,
compression: Option<stac::geoparquet::Compression>,
) -> stac::Result<()> {
match self {
Value::Json(json) => json.into_geoparquet_writer(writer, compression),
Value::Stac(stac) => stac.into_geoparquet_writer(writer, compression),
}
}
}
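
And the `IntoGeoparquet` half, against an in-memory writer. A sketch only: it assumes the crate's `geoparquet` feature is enabled and that a bare `Item::new` (no geometry) is accepted by the writer; `into_geoparquet_writer` and its `Option<Compression>` argument come from this diff, everything else is illustrative.

```rust
use stac::{IntoGeoparquet, Item};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Write a stac::Value as stac-geoparquet into an in-memory buffer, the
    // same call the `Value::Stac` arm above forwards to.
    let value = stac::Value::Item(Item::new("example-item"));
    let mut buffer: Vec<u8> = Vec::new(); // Vec<u8> is std::io::Write + Send
    value.into_geoparquet_writer(&mut buffer, None)?; // None: no explicit compression setting
    println!("wrote {} bytes of geoparquet", buffer.len());
    Ok(())
}
```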
11 changes: 9 additions & 2 deletions core/CHANGELOG.md
@@ -9,14 +9,21 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
### Added

- Deref `ItemCollection` ([#363](https://github.com/stac-utils/stac-rs/pull/363))
- `io::Format` ([#372](https://github.com/stac-utils/stac-rs/pull/371))
- `Format` ([#371](https://github.com/stac-utils/stac-rs/pull/371), [#396](https://github.com/stac-utils/stac-rs/pull/396))
- `Error::FeatureNotEnabled` and `Error::UnsupportedGeoparquetType` ([#396](https://github.com/stac-utils/stac-rs/pull/396))
- Read unknown versions ([#378](https://github.com/stac-utils/stac-rs/pull/378))
- `io::IntoFormattedBytes` ([#386](https://github.com/stac-utils/stac-rs/pull/386))
- Conversion traits for the three formats ([#396](https://github.com/stac-utils/stac-rs/pull/396))
- `object_store` ([#382](https://github.com/stac-utils/stac-rs/pull/382))
- `stac::geoparquet::Compression`, even if geoparquet is not enabled ([#396](https://github.com/stac-utils/stac-rs/pull/396))

### Changed

- Update **geoarrow** to v0.3.0 ([#367](https://github.com/stac-utils/stac-rs/pull/367))

### Removed

- `Error::ReqwestNotEnabled` and `Error::GdalNotEnabled` ([#396](https://github.com/stac-utils/stac-rs/pull/382))

## [0.9.0] - 2024-09-05

### Added
4 changes: 3 additions & 1 deletion core/Cargo.toml
@@ -36,7 +36,7 @@ object-store-aws = ["object-store", "object_store/aws"]
object-store-azure = ["object-store", "object_store/azure"]
object-store-gcp = ["object-store", "object_store/gcp"]
object-store-http = ["object-store", "object_store/http"]
- object-store-full = [
+ object-store-all = [
"object-store-aws",
"object-store-azure",
"object-store-gcp",
@@ -71,6 +71,8 @@ url = "2"
assert-json-diff = "2"
bytes = "1"
rstest = "0.22"
tempdir = "0.3"
tokio = "1"
tokio-test = "0.4"

[package.metadata.docs.rs]
2 changes: 1 addition & 1 deletion core/data/items.ndjson
@@ -1,2 +1,2 @@
{"type":"Feature","stac_version":"1.0.0","stac_extensions":["https://stac-extensions.github.io/projection/v1.1.0/schema.json","https://stac-extensions.github.io/raster/v1.1.0/schema.json"],"id":"dataset","geometry":null,"properties":{"datetime":"2024-09-07T14:41:31.917359Z","proj:shape":[2667,2658]},"links":[],"assets":{"data":{"href":"/Users/gadomski/Code/stac-rs/core/assets/dataset.tif","roles":["data"],"raster:bands":[{"data_type":"uint16"}]}}}
{"type":"Feature","stac_version":"1.0.0","stac_extensions":["https://stac-extensions.github.io/projection/v1.1.0/schema.json","https://stac-extensions.github.io/raster/v1.1.0/schema.json"],"id":"dataset_geo","geometry":{"type":"Polygon","coordinates":[[[-61.2876244,72.229798],[-52.3015987,72.229798],[-52.3015987,90.0],[-61.2876244,90.0],[-61.2876244,72.229798]]],"bbox":[-61.2876244,72.229798,-52.3015987,90.0]},"bbox":[-61.2876244,72.229798,-52.3015987,90.0],"properties":{"datetime":"2024-09-07T14:41:31.917358Z","proj:epsg":32621,"proj:bbox":[373185.0,8019284.949381611,639014.9492102272,8286015.0],"proj:centroid":{"lat":73.4675736,"lon":-56.8079473},"proj:shape":[2667,2658],"proj:transform":[100.01126757344893,0.0,373185.0,0.0,-100.01126757344893,8286015.0]},"links":[],"assets":{"data":{"href":"/Users/gadomski/Code/stac-rs/core/assets/dataset_geo.tif","roles":["data"],"raster:bands":[{"data_type":"uint16","spatial_resolution":100.01126757344893}]}}}
{"type":"Feature","stac_version":"1.0.0","stac_extensions":["https://stac-extensions.github.io/projection/v1.1.0/schema.json","https://stac-extensions.github.io/raster/v1.1.0/schema.json"],"id":"dataset_geo","geometry":{"type":"Polygon","coordinates":[[[-61.2876244,72.229798],[-52.3015987,72.229798],[-52.3015987,90.0],[-61.2876244,90.0],[-61.2876244,72.229798]]],"bbox":[-61.2876244,72.229798,-52.3015987,90.0]},"bbox":[-61.2876244,72.229798,-52.3015987,90.0],"properties":{"datetime":"2024-09-07T14:41:31.917358Z","proj:epsg":32621,"proj:bbox":[373185.0,8019284.949381611,639014.9492102272,8286015.0],"proj:centroid":{"lat":73.4675736,"lon":-56.8079473},"proj:shape":[2667,2658],"proj:transform":[100.01126757344893,0.0,373185.0,0.0,-100.01126757344893,8286015.0]},"links":[],"assets":{"data":{"href":"/Users/gadomski/Code/stac-rs/core/assets/dataset_geo.tif","roles":["data"],"raster:bands":[{"data_type":"uint16","spatial_resolution":100.01126757344893}]}}}
14 changes: 7 additions & 7 deletions core/src/error.rs
@@ -20,9 +20,9 @@ pub enum Error {
#[error(transparent)]
GdalError(#[from] gdal::errors::GdalError),

- /// GDAL is not enabled.
- #[error("gdal is not enabled")]
- GdalNotEnabled,
+ /// A required feature is not enabled.
+ #[error("{0} is not enabled")]
+ FeatureNotEnabled(&'static str),

/// [geoarrow::error::GeoArrowError]
#[error(transparent)]
@@ -114,10 +114,6 @@ pub enum Error {
#[cfg(feature = "geoparquet")]
Parquet(#[from] parquet::errors::ParquetError),

- /// Returned when trying to read from a url but the `reqwest` feature is not enabled.
- #[error("reqwest is not enabled")]
- ReqwestNotEnabled,

/// [reqwest::Error]
#[cfg(feature = "reqwest")]
#[error(transparent)]
@@ -147,6 +143,10 @@ pub enum Error {
#[error("unsupported format: {0}")]
UnsupportedFormat(String),

+ /// Unsupported geoparquet type
+ #[error("unsupported geoparquet type")]
+ UnsupportedGeoparquetType,

/// [url::ParseError]
#[error(transparent)]
Url(#[from] url::ParseError),
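
Downstream code that matched the old feature-specific variants can switch to the single `FeatureNotEnabled(&'static str)` variant; a sketch (the match arms and the "gdal" literal are illustrative, only the variant names come from this diff):

```rust
fn describe(error: &stac::Error) -> String {
    // The consolidated variant carries the name of the missing cargo feature,
    // replacing the per-feature ReqwestNotEnabled/GdalNotEnabled variants.
    match error {
        stac::Error::FeatureNotEnabled(feature) => {
            format!("rebuild with the `{feature}` feature enabled")
        }
        stac::Error::UnsupportedGeoparquetType => {
            "this value cannot be written as stac-geoparquet".to_string()
        }
        other => other.to_string(),
    }
}

fn main() {
    println!("{}", describe(&stac::Error::FeatureNotEnabled("gdal")));
}
```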
12 changes: 5 additions & 7 deletions core/src/extensions/authentication.rs
@@ -1,15 +1,13 @@
- //! The Authentication extension to the STAC specification provides a standard
- //! set of fields to describe authentication and authorization schemes, flows,
- //! and scopes required to access [Assets](crate::Asset) and
- //! [Links](crate::Link) that align with the [OpenAPI security
- //! spec](https://swagger.io/docs/specification/authentication/).
+ //! Provides a standard set of fields to describe authentication and
+ //! authorization schemes, flows, and scopes required to access
+ //! [Assets](crate::Asset) and [Links](crate::Link) that align with the [OpenAPI
+ //! security spec](https://swagger.io/docs/specification/authentication/).

+ use crate::Extension;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::collections::HashMap;

- use crate::Extension;

/// The authentication extension fields.
#[derive(Debug, Serialize, Deserialize)]
pub struct Authentication {