fix pipeline schema cli command
sh-rp committed Jan 29, 2024
1 parent f0a385b commit 51244ce
Showing 3 changed files with 13 additions and 10 deletions.
dlt/cli/_dlt.py (4 additions, 1 deletion)
@@ -498,7 +498,10 @@ def main() -> int:
     )
     pipe_cmd_schema = pipeline_subparsers.add_parser("schema", help="Displays default schema")
     pipe_cmd_schema.add_argument(
-        "--format", choices=["json", "yaml"], default="yaml", help="Display schema in this format"
+        "--format",
+        choices=["json", "yaml"],
+        default="yaml",
+        help="Display schema in this format",
     )
     pipe_cmd_schema.add_argument(
         "--remove-defaults", action="store_true", help="Does not show default hint values"
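For reference, a usage sketch of the new option on the existing `dlt pipeline <name> schema` command (the pipeline name chess_pipeline is only an assumed example):

    dlt pipeline chess_pipeline schema --format json
    dlt pipeline chess_pipeline schema --format yaml --remove-defaults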
dlt/cli/pipeline_command.py (6 additions, 1 deletion)
@@ -263,7 +263,12 @@ def _display_pending_packages() -> Tuple[Sequence[str], Sequence[str]]:
             fmt.warning("Pipeline does not have a default schema")
         else:
             fmt.echo("Found schema with name %s" % fmt.bold(p.default_schema_name))
-        schema_str = p.default_schema.to_pretty_yaml(remove_defaults=True)
+        format = command_kwargs.get("format")
+        s = p.default_schema
+        if format == "json":
+            schema_str = json.dumps(s.to_dict(remove_defaults=remove_defaults), pretty=True)
+        else:
+            schema_str = s.to_pretty_yaml(remove_defaults=remove_defaults)
         fmt.echo(schema_str)

     if operation == "drop":
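A minimal standalone sketch of the format-selection logic added above, assuming a dlt Schema-like object exposing to_dict and to_pretty_yaml (both appear in the diff); the standard-library json module stands in for dlt's own json helper, so indent=2 replaces pretty=True:

    import json  # stand-in for dlt's json helper used in the diff

    def render_schema(schema, command_kwargs, remove_defaults=False):
        # Hypothetical helper: pick the output format requested on the CLI.
        if command_kwargs.get("format") == "json":
            # Serialize the schema dict as indented JSON text.
            return json.dumps(schema.to_dict(remove_defaults=remove_defaults), indent=2)
        # Default: pretty YAML, matching the previous behavior.
        return schema.to_pretty_yaml(remove_defaults=remove_defaults)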
docs/website/docs/intro-snippets.py (3 additions, 8 deletions)
@@ -18,14 +18,13 @@ def intro_snippet() -> None:
         response.raise_for_status()
         data.append(response.json())
     # Extract, normalize, and load the data
-    load_info = pipeline.run(data, table_name='player')
+    load_info = pipeline.run(data, table_name="player")
     # @@@DLT_SNIPPET_END api

     assert_load_info(load_info)


 def csv_snippet() -> None:
-
     # @@@DLT_SNIPPET_START csv
     import dlt
     import pandas as pd
@@ -50,8 +49,8 @@ def csv_snippet() -> None:

     assert_load_info(load_info)

-def db_snippet() -> None:

+def db_snippet() -> None:
     # @@@DLT_SNIPPET_START db
     import dlt
     from sqlalchemy import create_engine
@@ -74,13 +73,9 @@ def db_snippet() -> None:
     )

     # Convert the rows into dictionaries on the fly with a map function
-    load_info = pipeline.run(
-        map(lambda row: dict(row._mapping), rows),
-        table_name="genome"
-    )
+    load_info = pipeline.run(map(lambda row: dict(row._mapping), rows), table_name="genome")

     print(load_info)
     # @@@DLT_SNIPPET_END db

     assert_load_info(load_info)
