
Commit 565e46d

Revert "clippy-tracing"
This reverts commit f6dcda9.
aditanase committed May 23, 2024
1 parent: f6dcda9 · commit: 565e46d
Showing 147 changed files with 0 additions and 3,134 deletions.
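
For context, what this revert removes: the functions touched by this commit had been annotated with a `#[tracing::instrument(...)]` attribute from the `tracing` crate, which wraps each call in a span; `level = "info"` sets the span's level and `skip(...)` lists arguments that should not be recorded as span fields. Below is a minimal illustrative sketch of how such an attribute behaves, assuming the `tracing` and `tracing-subscriber` crates; the `lookup` function and the subscriber setup are hypothetical examples, not code from DataFusion or from this commit.

```rust
use tracing::instrument;

// `level = "info"` sets the span level; `skip(ctx)` keeps `ctx` out of the
// recorded span fields (useful for large or non-Debug arguments).
#[instrument(level = "info", skip(ctx))]
fn lookup(name: &str, ctx: &[String]) -> bool {
    tracing::info!("checking catalog");
    ctx.iter().any(|t| t == name)
}

fn main() {
    // Install a subscriber so spans and events are actually emitted.
    tracing_subscriber::fmt()
        .with_max_level(tracing::Level::INFO)
        .init();

    let tables = vec!["df_settings".to_string(), "columns".to_string()];
    let _found = lookup("columns", &tables);
}
```

With a subscriber installed, each call to `lookup` is recorded as an `info`-level span named `lookup` that carries a `name` field but no `ctx` field; the revert drops exactly this kind of per-function span annotation from the 147 changed files.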
1 change: 0 additions & 1 deletion datafusion/core/src/bin/print_config_docs.rs
@@ -17,7 +17,6 @@

use datafusion::config::ConfigOptions;

#[tracing::instrument(level = "info", skip())]
fn main() {
let docs = ConfigOptions::generate_config_markdown();
println!("{docs}");
40 changes: 0 additions & 40 deletions datafusion/core/src/catalog/information_schema.rs
@@ -64,7 +64,6 @@ pub struct InformationSchemaProvider {
}

impl InformationSchemaProvider {
#[tracing::instrument(level = "info", skip(catalog_list))]
/// Creates a new [`InformationSchemaProvider`] for the provided `catalog_list`
pub fn new(catalog_list: Arc<dyn CatalogProviderList>) -> Self {
Self {
@@ -79,7 +78,6 @@ struct InformationSchemaConfig {
}

impl InformationSchemaConfig {
#[tracing::instrument(level = "info", skip(self, builder))]
/// Construct the `information_schema.tables` virtual table
async fn make_tables(
&self,
@@ -122,7 +120,6 @@ impl InformationSchemaConfig {
Ok(())
}

#[tracing::instrument(level = "info", skip(self, builder))]
async fn make_schemata(&self, builder: &mut InformationSchemataBuilder) {
for catalog_name in self.catalog_list.catalog_names() {
let catalog = self.catalog_list.catalog(&catalog_name).unwrap();
@@ -138,7 +135,6 @@ impl InformationSchemaConfig {
}
}

#[tracing::instrument(level = "info", skip(self, builder))]
async fn make_views(
&self,
builder: &mut InformationSchemaViewBuilder,
@@ -168,7 +164,6 @@ impl InformationSchemaConfig {
Ok(())
}

#[tracing::instrument(level = "info", skip(self, builder))]
/// Construct the `information_schema.columns` virtual table
async fn make_columns(
&self,
@@ -204,7 +199,6 @@ impl InformationSchemaConfig {
Ok(())
}

#[tracing::instrument(level = "info", skip(self, config_options, builder))]
/// Construct the `information_schema.df_settings` virtual table
fn make_df_settings(
&self,
@@ -219,20 +213,17 @@

#[async_trait]
impl SchemaProvider for InformationSchemaProvider {
#[tracing::instrument(level = "info", skip(self))]
fn as_any(&self) -> &dyn Any {
self
}

#[tracing::instrument(level = "info", skip(self))]
fn table_names(&self) -> Vec<String> {
INFORMATION_SCHEMA_TABLES
.iter()
.map(|t| t.to_string())
.collect()
}

#[tracing::instrument(level = "info", skip(self, name))]
async fn table(
&self,
name: &str,
@@ -252,7 +243,6 @@ impl SchemaProvider for InformationSchemaProvider {
)))
}

#[tracing::instrument(level = "info", skip(self, name))]
fn table_exist(&self, name: &str) -> bool {
INFORMATION_SCHEMA_TABLES.contains(&name.to_ascii_lowercase().as_str())
}
@@ -264,7 +254,6 @@ struct InformationSchemaTables {
}

impl InformationSchemaTables {
#[tracing::instrument(level = "info", skip(config))]
fn new(config: InformationSchemaConfig) -> Self {
let schema = Arc::new(Schema::new(vec![
Field::new("table_catalog", DataType::Utf8, false),
@@ -276,7 +265,6 @@ impl InformationSchemaTables {
Self { schema, config }
}

#[tracing::instrument(level = "info", skip(self))]
fn builder(&self) -> InformationSchemaTablesBuilder {
InformationSchemaTablesBuilder {
catalog_names: StringBuilder::new(),
@@ -289,12 +277,10 @@ impl InformationSchemaTables {
}

impl PartitionStream for InformationSchemaTables {
#[tracing::instrument(level = "info", skip(self))]
fn schema(&self) -> &SchemaRef {
&self.schema
}

#[tracing::instrument(level = "info", skip(self, _ctx))]
fn execute(&self, _ctx: Arc<TaskContext>) -> SendableRecordBatchStream {
let mut builder = self.builder();
let config = self.config.clone();
@@ -321,7 +307,6 @@ struct InformationSchemaTablesBuilder {
}

impl InformationSchemaTablesBuilder {
#[tracing::instrument(level = "info", skip(self, catalog_name, schema_name, table_name, table_type))]
fn add_table(
&mut self,
catalog_name: impl AsRef<str>,
@@ -340,7 +325,6 @@ impl InformationSchemaTablesBuilder {
});
}

#[tracing::instrument(level = "info", skip(self))]
fn finish(&mut self) -> RecordBatch {
RecordBatch::try_new(
self.schema.clone(),
@@ -361,7 +345,6 @@ struct InformationSchemaViews {
}

impl InformationSchemaViews {
#[tracing::instrument(level = "info", skip(config))]
fn new(config: InformationSchemaConfig) -> Self {
let schema = Arc::new(Schema::new(vec![
Field::new("table_catalog", DataType::Utf8, false),
@@ -373,7 +356,6 @@ impl InformationSchemaViews {
Self { schema, config }
}

#[tracing::instrument(level = "info", skip(self))]
fn builder(&self) -> InformationSchemaViewBuilder {
InformationSchemaViewBuilder {
catalog_names: StringBuilder::new(),
@@ -386,12 +368,10 @@ impl InformationSchemaViews {
}

impl PartitionStream for InformationSchemaViews {
#[tracing::instrument(level = "info", skip(self))]
fn schema(&self) -> &SchemaRef {
&self.schema
}

#[tracing::instrument(level = "info", skip(self, _ctx))]
fn execute(&self, _ctx: Arc<TaskContext>) -> SendableRecordBatchStream {
let mut builder = self.builder();
let config = self.config.clone();
@@ -418,7 +398,6 @@ struct InformationSchemaViewBuilder {
}

impl InformationSchemaViewBuilder {
#[tracing::instrument(level = "info", skip(self, catalog_name, schema_name, table_name, definition))]
fn add_view(
&mut self,
catalog_name: impl AsRef<str>,
@@ -433,7 +412,6 @@ impl InformationSchemaViewBuilder {
self.definitions.append_option(definition.as_ref());
}

#[tracing::instrument(level = "info", skip(self))]
fn finish(&mut self) -> RecordBatch {
RecordBatch::try_new(
self.schema.clone(),
@@ -454,7 +432,6 @@ struct InformationSchemaColumns {
}

impl InformationSchemaColumns {
#[tracing::instrument(level = "info", skip(config))]
fn new(config: InformationSchemaConfig) -> Self {
let schema = Arc::new(Schema::new(vec![
Field::new("table_catalog", DataType::Utf8, false),
@@ -477,7 +454,6 @@ impl InformationSchemaColumns {
Self { schema, config }
}

#[tracing::instrument(level = "info", skip(self))]
fn builder(&self) -> InformationSchemaColumnsBuilder {
// StringBuilder requires providing an initial capacity, so
// pick 10 here arbitrarily as this is not performance
@@ -506,12 +482,10 @@ impl InformationSchemaColumns {
}

impl PartitionStream for InformationSchemaColumns {
#[tracing::instrument(level = "info", skip(self))]
fn schema(&self) -> &SchemaRef {
&self.schema
}

#[tracing::instrument(level = "info", skip(self, _ctx))]
fn execute(&self, _ctx: Arc<TaskContext>) -> SendableRecordBatchStream {
let mut builder = self.builder();
let config = self.config.clone();
@@ -549,7 +523,6 @@ struct InformationSchemaColumnsBuilder {
}

impl InformationSchemaColumnsBuilder {
#[tracing::instrument(level = "info", skip(self, catalog_name, schema_name, table_name, field_position, field))]
fn add_column(
&mut self,
catalog_name: &str,
@@ -644,7 +617,6 @@ impl InformationSchemaColumnsBuilder {
self.interval_types.append_null();
}

#[tracing::instrument(level = "info", skip(self))]
fn finish(&mut self) -> RecordBatch {
RecordBatch::try_new(
self.schema.clone(),
@@ -676,7 +648,6 @@ struct InformationSchemata {
}

impl InformationSchemata {
#[tracing::instrument(level = "info", skip(config))]
fn new(config: InformationSchemaConfig) -> Self {
let schema = Arc::new(Schema::new(vec![
Field::new("catalog_name", DataType::Utf8, false),
@@ -690,7 +661,6 @@ impl InformationSchemata {
Self { schema, config }
}

#[tracing::instrument(level = "info", skip(self))]
fn builder(&self) -> InformationSchemataBuilder {
InformationSchemataBuilder {
schema: self.schema.clone(),
@@ -717,7 +687,6 @@ struct InformationSchemataBuilder {
}

impl InformationSchemataBuilder {
#[tracing::instrument(level = "info", skip(self, catalog_name, schema_name, schema_owner))]
fn add_schemata(
&mut self,
catalog_name: &str,
@@ -738,7 +707,6 @@ impl InformationSchemataBuilder {
self.sql_path.append_null();
}

#[tracing::instrument(level = "info", skip(self))]
fn finish(&mut self) -> RecordBatch {
RecordBatch::try_new(
self.schema.clone(),
@@ -757,12 +725,10 @@ impl InformationSchemataBuilder {
}

impl PartitionStream for InformationSchemata {
#[tracing::instrument(level = "info", skip(self))]
fn schema(&self) -> &SchemaRef {
&self.schema
}

#[tracing::instrument(level = "info", skip(self, _ctx))]
fn execute(&self, _ctx: Arc<TaskContext>) -> SendableRecordBatchStream {
let mut builder = self.builder();
let config = self.config.clone();
@@ -783,7 +749,6 @@ struct InformationSchemaDfSettings {
}

impl InformationSchemaDfSettings {
#[tracing::instrument(level = "info", skip(config))]
fn new(config: InformationSchemaConfig) -> Self {
let schema = Arc::new(Schema::new(vec![
Field::new("name", DataType::Utf8, false),
@@ -794,7 +759,6 @@ impl InformationSchemaDfSettings {
Self { schema, config }
}

#[tracing::instrument(level = "info", skip(self))]
fn builder(&self) -> InformationSchemaDfSettingsBuilder {
InformationSchemaDfSettingsBuilder {
names: StringBuilder::new(),
@@ -806,12 +770,10 @@ impl InformationSchemaDfSettings {
}

impl PartitionStream for InformationSchemaDfSettings {
#[tracing::instrument(level = "info", skip(self))]
fn schema(&self) -> &SchemaRef {
&self.schema
}

#[tracing::instrument(level = "info", skip(self, ctx))]
fn execute(&self, ctx: Arc<TaskContext>) -> SendableRecordBatchStream {
let config = self.config.clone();
let mut builder = self.builder();
@@ -835,14 +797,12 @@ struct InformationSchemaDfSettingsBuilder {
}

impl InformationSchemaDfSettingsBuilder {
#[tracing::instrument(level = "info", skip(self, entry))]
fn add_setting(&mut self, entry: ConfigEntry) {
self.names.append_value(entry.key);
self.values.append_option(entry.value);
self.descriptions.append_value(entry.description);
}

#[tracing::instrument(level = "info", skip(self))]
fn finish(&mut self) -> RecordBatch {
RecordBatch::try_new(
self.schema.clone(),
9 changes: 0 additions & 9 deletions datafusion/core/src/catalog/listing_schema.rs
@@ -60,7 +60,6 @@ pub struct ListingSchemaProvider {
}

impl ListingSchemaProvider {
#[tracing::instrument(level = "info", skip(authority, path, factory, store, format))]
/// Create a new `ListingSchemaProvider`
///
/// Arguments:
@@ -87,7 +86,6 @@ impl ListingSchemaProvider {
}
}

#[tracing::instrument(level = "info", skip(self, state))]
/// Reload table information from ObjectStore
pub async fn refresh(&self, state: &SessionState) -> datafusion_common::Result<()> {
let entries: Vec<_> = self.store.list(Some(&self.path)).try_collect().await?;
@@ -157,12 +155,10 @@ impl ListingSchemaProvider {

#[async_trait]
impl SchemaProvider for ListingSchemaProvider {
#[tracing::instrument(level = "info", skip(self))]
fn as_any(&self) -> &dyn Any {
self
}

#[tracing::instrument(level = "info", skip(self))]
fn table_names(&self) -> Vec<String> {
self.tables
.lock()
@@ -172,7 +168,6 @@ impl SchemaProvider for ListingSchemaProvider {
.collect()
}

#[tracing::instrument(level = "info", skip(self, name))]
async fn table(
&self,
name: &str,
@@ -185,7 +180,6 @@ impl SchemaProvider for ListingSchemaProvider {
.cloned())
}

#[tracing::instrument(level = "info", skip(self, name, table))]
fn register_table(
&self,
name: String,
@@ -198,15 +192,13 @@ impl SchemaProvider for ListingSchemaProvider {
Ok(Some(table))
}

#[tracing::instrument(level = "info", skip(self, name))]
fn deregister_table(
&self,
name: &str,
) -> datafusion_common::Result<Option<Arc<dyn TableProvider>>> {
Ok(self.tables.lock().expect("Can't lock tables").remove(name))
}

#[tracing::instrument(level = "info", skip(self, name))]
fn table_exist(&self, name: &str) -> bool {
self.tables
.lock()
@@ -224,7 +216,6 @@ struct TablePath<'a> {
}

impl TablePath<'_> {
#[tracing::instrument(level = "info", skip(self))]
/// Format the path with a '/' appended if it's a directory.
/// Clients (e.g. object_store listing) can and will use the presence of a trailing slash as a heuristic
fn to_string(&self) -> Option<String> {