Skip to content

Commit

Permalink
Added snow spcs compute-pool deploy command (#2001)
Browse files Browse the repository at this point in the history
* Added snow spcs compute-pool deploy command

* Fixed after review
sfc-gh-astus authored Jan 20, 2025

Verified

This commit was created on GitHub.com and signed with GitHub’s verified signature.
1 parent b39d50b commit cd0ff29
Showing 13 changed files with 555 additions and 32 deletions.
2 changes: 2 additions & 0 deletions RELEASE-NOTES.md
Original file line number Diff line number Diff line change
@@ -47,6 +47,8 @@
* Use `--all` to fetch all columns.
* Added support for glob pattern (except `**`) in artifact paths in snowflake.yml for Streamlit.
* Added support for glob pattern (except `**`) in artifact paths in snowflake.yml for Snowpark, requires ENABLE_SNOWPARK_GLOB_SUPPORT feature flag.
* Added `--replace` flag to `snow spcs compute-pool create` command.
* Added command `snow spcs compute-pool deploy`.

## Fixes and improvements

58 changes: 55 additions & 3 deletions src/snowflake/cli/_plugins/spcs/compute_pool/commands.py
Original file line number Diff line number Diff line change
@@ -14,28 +14,40 @@

from __future__ import annotations

from typing import Optional
from typing import Dict, Optional

import typer
from click import ClickException
from click import ClickException, UsageError
from snowflake.cli._plugins.object.command_aliases import (
add_object_command_aliases,
)
from snowflake.cli._plugins.object.common import CommentOption
from snowflake.cli._plugins.spcs.common import (
validate_and_set_instances,
)
from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity_model import (
ComputePoolEntityModel,
)
from snowflake.cli._plugins.spcs.compute_pool.manager import ComputePoolManager
from snowflake.cli.api.cli_global_context import get_cli_context
from snowflake.cli.api.commands.decorators import with_project_definition
from snowflake.cli.api.commands.flags import (
IfNotExistsOption,
OverrideableOption,
ReplaceOption,
entity_argument,
identifier_argument,
like_option,
)
from snowflake.cli.api.commands.snow_typer import SnowTyperFactory
from snowflake.cli.api.constants import ObjectType
from snowflake.cli.api.exceptions import NoProjectDefinitionError
from snowflake.cli.api.identifiers import FQN
from snowflake.cli.api.output.types import CommandResult, SingleQueryResult
from snowflake.cli.api.output.types import (
CommandResult,
MessageResult,
SingleQueryResult,
)
from snowflake.cli.api.project.util import is_valid_object_name

app = SnowTyperFactory(
@@ -124,6 +136,7 @@ def create(
),
auto_suspend_secs: int = AutoSuspendSecsOption(),
comment: Optional[str] = CommentOption(help=_COMMENT_HELP),
replace: bool = ReplaceOption(),
if_not_exists: bool = IfNotExistsOption(),
**options,
) -> CommandResult:
@@ -141,10 +154,49 @@ def create(
auto_suspend_secs=auto_suspend_secs,
comment=comment,
if_not_exists=if_not_exists,
replace=replace,
)
return SingleQueryResult(cursor)


@app.command("deploy", requires_connection=True)
@with_project_definition()
def deploy(
replace: bool = ReplaceOption(
help="Replace the compute-pool if it already exists."
),
entity_id: str = entity_argument("compute-pool"),
**options,
):
"""
Deploys a compute pool from the project definition file.
"""
cli_context = get_cli_context()
pd = cli_context.project_definition
compute_pools: Dict[str, ComputePoolEntityModel] = pd.get_entities_by_type(
entity_type="compute-pool"
)

if not compute_pools:
raise NoProjectDefinitionError(
project_type="compute pool", project_root=cli_context.project_root
)

if entity_id and entity_id not in compute_pools:
raise UsageError(f"No '{entity_id}' entity in project definition file.")
elif len(compute_pools.keys()) == 1:
entity_id = list(compute_pools.keys())[0]

if entity_id is None:
raise UsageError(
"Multiple compute pools found. Please provide entity id for the operation."
)

ComputePoolManager().deploy(compute_pool=compute_pools[entity_id], replace=replace)

return MessageResult(f"Compute pool '{entity_id}' successfully deployed.")


@app.command("stop-all", requires_connection=True)
def stop_all(name: FQN = ComputePoolNameArgument, **options) -> CommandResult:
"""
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity_model import (
ComputePoolEntityModel,
)
from snowflake.cli.api.entities.common import EntityBase


class ComputePoolEntity(EntityBase[ComputePoolEntityModel]):
    """Project entity for an SPCS compute pool; all behavior comes from EntityBase."""

    pass
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
from typing import Literal, Optional

from pydantic import Field
from snowflake.cli.api.project.schemas.entities.common import EntityModelBase
from snowflake.cli.api.project.schemas.updatable_model import DiscriminatorField


class ComputePoolEntityModel(EntityModelBase):
    """Schema for a `compute-pool` entity in the project definition file."""

    type: Literal["compute-pool"] = DiscriminatorField()  # noqa: A003
    # min_nodes / instance_family were annotated as required (`int` / `str`)
    # while defaulting to None; Optional[...] makes the annotation match the
    # declared default and is backward-compatible (accepts strictly more).
    min_nodes: Optional[int] = Field(
        title="Minimum number of nodes", default=None, ge=0
    )
    max_nodes: Optional[int] = Field(title="Maximum number of nodes", default=None)
    instance_family: Optional[str] = Field(
        title="Name of the instance family", default=None
    )
    auto_resume: Optional[bool] = Field(
        title="The compute pool will automatically resume when a service or job is submitted to it",
        default=True,
    )
    initially_suspended: Optional[bool] = Field(
        title="Starts the compute pool in a suspended state", default=False
    )
    auto_suspend_seconds: Optional[int] = Field(
        title="Number of seconds of inactivity after which you want Snowflake to automatically suspend the compute pool",
        default=3600,
    )
    comment: Optional[str] = Field(title="Comment for the compute pool", default=None)
35 changes: 34 additions & 1 deletion src/snowflake/cli/_plugins/spcs/compute_pool/manager.py
Original file line number Diff line number Diff line change
@@ -16,12 +16,17 @@

from typing import List, Optional

from snowflake.cli._plugins.object.manager import ObjectManager
from snowflake.cli._plugins.spcs.common import (
NoPropertiesProvidedError,
handle_object_already_exists,
strip_empty_lines,
)
from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity_model import (
ComputePoolEntityModel,
)
from snowflake.cli.api.constants import ObjectType
from snowflake.cli.api.identifiers import FQN
from snowflake.cli.api.sql_execution import SqlExecutionMixin
from snowflake.connector.cursor import SnowflakeCursor
from snowflake.connector.errors import ProgrammingError
@@ -39,7 +44,17 @@ def create(
auto_suspend_secs: int,
comment: Optional[str],
if_not_exists: bool,
replace: bool,
) -> SnowflakeCursor:

if replace:
object_manager = ObjectManager()
object_type = ObjectType.COMPUTE_POOL.value.cli_name
entity_id_fqn = FQN.from_string(pool_name)
if object_manager.object_exists(object_type=object_type, fqn=entity_id_fqn):
self.stop(pool_name)
object_manager.drop(object_type=object_type, fqn=entity_id_fqn)

create_statement = "CREATE COMPUTE POOL"
if if_not_exists:
create_statement = f"{create_statement} IF NOT EXISTS"
@@ -58,7 +73,25 @@ def create(
try:
return self.execute_query(strip_empty_lines(query))
except ProgrammingError as e:
handle_object_already_exists(e, ObjectType.COMPUTE_POOL, pool_name)
handle_object_already_exists(
e, ObjectType.COMPUTE_POOL, pool_name, replace_available=True
)

def deploy(
self, compute_pool: ComputePoolEntityModel, replace: bool
) -> SnowflakeCursor:
return self.create(
pool_name=compute_pool.entity_id,
min_nodes=compute_pool.min_nodes,
max_nodes=compute_pool.max_nodes,
instance_family=compute_pool.instance_family,
auto_resume=compute_pool.auto_resume,
initially_suspended=compute_pool.initially_suspended,
auto_suspend_secs=compute_pool.auto_suspend_seconds,
comment=compute_pool.comment,
if_not_exists=False,
replace=replace,
)

def stop(self, pool_name: str) -> SnowflakeCursor:
return self.execute_query(f"alter compute pool {pool_name} stop all")
8 changes: 8 additions & 0 deletions src/snowflake/cli/api/project/schemas/entities/entities.py
Original file line number Diff line number Diff line change
@@ -32,6 +32,12 @@
FunctionEntityModel,
ProcedureEntityModel,
)
from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity import (
ComputePoolEntity,
)
from snowflake.cli._plugins.spcs.compute_pool.compute_pool_entity_model import (
ComputePoolEntityModel,
)
from snowflake.cli._plugins.streamlit.streamlit_entity import StreamlitEntity
from snowflake.cli._plugins.streamlit.streamlit_entity_model import (
StreamlitEntityModel,
@@ -43,13 +49,15 @@
StreamlitEntity,
ProcedureEntity,
FunctionEntity,
ComputePoolEntity,
]
EntityModel = Union[
ApplicationEntityModel,
ApplicationPackageEntityModel,
StreamlitEntityModel,
FunctionEntityModel,
ProcedureEntityModel,
ComputePoolEntityModel,
]

ALL_ENTITIES: List[Entity] = [*get_args(Entity)]
99 changes: 99 additions & 0 deletions tests/__snapshots__/test_help_messages.ambr
Original file line number Diff line number Diff line change
@@ -5662,6 +5662,9 @@
| --comment TEXT Comment for the |
| compute pool. |
| [default: None] |
| --replace Replace this |
| object if it |
| already exists. |
| --if-not-exists Only apply this |
| operation if the |
| specified object |
@@ -5744,6 +5747,100 @@
+------------------------------------------------------------------------------+


'''
# ---
# name: test_help_messages[spcs.compute-pool.deploy]
'''

Usage: default spcs compute-pool deploy [OPTIONS] [ENTITY_ID]

Deploys a compute pool from the project definition file.

+- Arguments ------------------------------------------------------------------+
| entity_id [ENTITY_ID] ID of compute-pool entity. |
| [default: None] |
+------------------------------------------------------------------------------+
+- Options --------------------------------------------------------------------+
| --replace Replace the compute-pool if it already exists. |
| --project -p TEXT Path where Snowflake project resides. Defaults to |
| current working directory. |
| --env TEXT String in format of key=value. Overrides variables |
| from env section used for templates. |
| --help -h Show this message and exit. |
+------------------------------------------------------------------------------+
+- Connection configuration ---------------------------------------------------+
| --connection,--environment -c TEXT Name of the connection, as |
| defined in your config.toml |
| file. Default: default. |
| --host TEXT Host address for the |
| connection. Overrides the |
| value specified for the |
| connection. |
| --port INTEGER Port for the connection. |
| Overrides the value |
| specified for the |
| connection. |
| --account,--accountname TEXT Name assigned to your |
| Snowflake account. Overrides |
| the value specified for the |
| connection. |
| --user,--username TEXT Username to connect to |
| Snowflake. Overrides the |
| value specified for the |
| connection. |
| --password TEXT Snowflake password. |
| Overrides the value |
| specified for the |
| connection. |
| --authenticator TEXT Snowflake authenticator. |
| Overrides the value |
| specified for the |
| connection. |
| --private-key-file,--private… TEXT Snowflake private key file |
| path. Overrides the value |
| specified for the |
| connection. |
| --token-file-path TEXT Path to file with an OAuth |
| token that should be used |
| when connecting to Snowflake |
| --database,--dbname TEXT Database to use. Overrides |
| the value specified for the |
| connection. |
| --schema,--schemaname TEXT Database schema to use. |
| Overrides the value |
| specified for the |
| connection. |
| --role,--rolename TEXT Role to use. Overrides the |
| value specified for the |
| connection. |
| --warehouse TEXT Warehouse to use. Overrides |
| the value specified for the |
| connection. |
| --temporary-connection -x Uses connection defined with |
| command line parameters, |
| instead of one defined in |
| config |
| --mfa-passcode TEXT Token to use for |
| multi-factor authentication |
| (MFA) |
| --enable-diag Run Python connector |
| diagnostic test |
| --diag-log-path TEXT Diagnostic report path |
| --diag-allowlist-path TEXT Diagnostic report path to |
| optional allowlist |
+------------------------------------------------------------------------------+
+- Global configuration -------------------------------------------------------+
| --format [TABLE|JSON] Specifies the output format. |
| [default: TABLE] |
| --verbose -v Displays log entries for log levels info |
| and higher. |
| --debug Displays log entries for log levels debug |
| and higher; debug logs contain additional |
| information. |
| --silent Turns off intermediate output to console. |
+------------------------------------------------------------------------------+


'''
# ---
# name: test_help_messages[spcs.compute-pool.describe]
@@ -6603,6 +6700,7 @@
+------------------------------------------------------------------------------+
+- Commands -------------------------------------------------------------------+
| create Creates a new compute pool. |
| deploy Deploys a compute pool from the project definition file. |
| describe Provides description of compute pool. |
| drop Drops compute pool with given name. |
| list Lists all available compute pools. |
@@ -11113,6 +11211,7 @@
+------------------------------------------------------------------------------+
+- Commands -------------------------------------------------------------------+
| create Creates a new compute pool. |
| deploy Deploys a compute pool from the project definition file. |
| describe Provides description of compute pool. |
| drop Drops compute pool with given name. |
| list Lists all available compute pools. |
228 changes: 200 additions & 28 deletions tests/spcs/test_compute_pool.py
Original file line number Diff line number Diff line change
@@ -13,7 +13,8 @@
# limitations under the License.

import json
from unittest.mock import Mock, patch
from textwrap import dedent
from unittest.mock import Mock, call, patch

import pytest
from click import ClickException
@@ -27,17 +28,20 @@
from snowflake.cli.api.constants import ObjectType
from snowflake.cli.api.identifiers import FQN
from snowflake.cli.api.project.util import to_string_literal
from snowflake.connector import ProgrammingError
from snowflake.connector.cursor import SnowflakeCursor

from tests.spcs.test_common import SPCS_OBJECT_EXISTS_ERROR
from tests_integration.testing_utils.assertions.test_result_assertions import (
assert_that_result_is_successful_and_executed_successfully,
)


@patch(
EXECUTE_QUERY = (
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)


@patch(EXECUTE_QUERY)
def test_create(mock_execute_query):
pool_name = "test_pool"
min_nodes = 2
@@ -58,6 +62,7 @@ def test_create(mock_execute_query):
initially_suspended=initially_suspended,
auto_suspend_secs=auto_suspend_secs,
comment=comment,
replace=False,
if_not_exists=False,
)
expected_query = " ".join(
@@ -99,6 +104,7 @@ def test_create_pool_cli_defaults(mock_create, runner):
initially_suspended=False,
auto_suspend_secs=3600,
comment=None,
replace=False,
if_not_exists=False,
)

@@ -136,13 +142,12 @@ def test_create_pool_cli(mock_create, runner):
initially_suspended=True,
auto_suspend_secs=7200,
comment=to_string_literal("this is a test"),
replace=False,
if_not_exists=True,
)


@patch(
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)
@patch(EXECUTE_QUERY)
@patch("snowflake.cli._plugins.spcs.compute_pool.manager.handle_object_already_exists")
def test_create_compute_pool_already_exists(mock_handle, mock_execute):
pool_name = "test_pool"
@@ -156,16 +161,18 @@ def test_create_compute_pool_already_exists(mock_handle, mock_execute):
initially_suspended=True,
auto_suspend_secs=7200,
comment=to_string_literal("this is a test"),
replace=False,
if_not_exists=False,
)
mock_handle.assert_called_once_with(
SPCS_OBJECT_EXISTS_ERROR, ObjectType.COMPUTE_POOL, pool_name
SPCS_OBJECT_EXISTS_ERROR,
ObjectType.COMPUTE_POOL,
pool_name,
replace_available=True,
)


@patch(
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)
@patch(EXECUTE_QUERY)
def test_create_compute_pool_if_not_exists(mock_execute_query):
cursor = Mock(spec=SnowflakeCursor)
mock_execute_query.return_value = cursor
@@ -178,6 +185,7 @@ def test_create_compute_pool_if_not_exists(mock_execute_query):
initially_suspended=False,
auto_suspend_secs=3600,
comment=None,
replace=False,
if_not_exists=True,
)
expected_query = " ".join(
@@ -196,9 +204,183 @@ def test_create_compute_pool_if_not_exists(mock_execute_query):
assert result == cursor


@patch(
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)
@patch("snowflake.cli._plugins.object.manager.ObjectManager.execute_query")
@patch(EXECUTE_QUERY)
def test_create_compute_pool_replace(
mock_execute_query, mock_execute_query_object_manager, runner
):
compute_pool_name = "test_pool"

result = runner.invoke(
[
"spcs",
"compute-pool",
"create",
compute_pool_name,
"--replace",
"--family",
"test_family",
]
)

assert result.exit_code == 0, result.output
expected_query = dedent(
f"""\
CREATE COMPUTE POOL {compute_pool_name}
MIN_NODES = 1
MAX_NODES = 1
INSTANCE_FAMILY = test_family
AUTO_RESUME = True
INITIALLY_SUSPENDED = False
AUTO_SUSPEND_SECS = 3600"""
)
mock_execute_query.assert_has_calls(
[call(f"alter compute pool {compute_pool_name} stop all"), call(expected_query)]
)
mock_execute_query_object_manager.assert_has_calls(
[
call(f"describe compute pool IDENTIFIER('{compute_pool_name}')"),
call(f"drop compute pool IDENTIFIER('{compute_pool_name}')"),
]
)


@patch(EXECUTE_QUERY)
def test_deploy(mock_execute_query, runner, project_directory):
    """Deploying the single pool from the project definition issues one CREATE."""
    with project_directory("spcs_compute_pool"):
        result = runner.invoke(["spcs", "compute-pool", "deploy"])

    assert result.exit_code == 0, result.output
    assert "Compute pool 'test_compute_pool' successfully deployed." in result.output

    expected_query = (
        "CREATE COMPUTE POOL test_compute_pool\n"
        "MIN_NODES = 1\n"
        "MAX_NODES = 2\n"
        "INSTANCE_FAMILY = CPU_X64_XS\n"
        "AUTO_RESUME = True\n"
        "INITIALLY_SUSPENDED = True\n"
        "AUTO_SUSPEND_SECS = 60"
    )
    mock_execute_query.assert_called_once_with(expected_query)


@patch("snowflake.cli._plugins.object.manager.ObjectManager.execute_query")
@patch(EXECUTE_QUERY)
def test_deploy_replace(
mock_execute_query, mock_execute_query_object_manager, runner, project_directory
):
compute_pool_name = "test_compute_pool"

with project_directory("spcs_compute_pool"):
result = runner.invoke(["spcs", "compute-pool", "deploy", "--replace"])

assert result.exit_code == 0, result.output
assert (
f"Compute pool '{compute_pool_name}' successfully deployed."
in result.output
)
expected_query = dedent(
f"""\
CREATE COMPUTE POOL {compute_pool_name}
MIN_NODES = 1
MAX_NODES = 2
INSTANCE_FAMILY = CPU_X64_XS
AUTO_RESUME = True
INITIALLY_SUSPENDED = True
AUTO_SUSPEND_SECS = 60"""
)
mock_execute_query.assert_has_calls(
[
call(f"alter compute pool {compute_pool_name} stop all"),
call(expected_query),
]
)
mock_execute_query_object_manager.assert_has_calls(
[
call(f"describe compute pool IDENTIFIER('{compute_pool_name}')"),
call(f"drop compute pool IDENTIFIER('{compute_pool_name}')"),
]
)


@patch(EXECUTE_QUERY)
def test_deploy_compute_pool_already_exists(
    mock_execute_query, runner, project_directory
):
    """Without --replace, an existing pool surfaces the 'use --replace' hint."""
    mock_execute_query.side_effect = ProgrammingError(
        errno=2002, msg="Object 'test_compute_pool' already exists."
    )

    with project_directory("spcs_compute_pool"):
        result = runner.invoke(["spcs", "compute-pool", "deploy"])

    assert result.exit_code == 1, result.output
    expected_hint = (
        "Compute-pool TEST_COMPUTE_POOL already exists. Use --replace flag to update"
    )
    assert expected_hint in result.output


def test_deploy_no_compute_pools(runner, project_directory):
    """Deploy fails when the project defines no compute-pool entities."""
    with project_directory("empty_project"):
        deploy_result = runner.invoke(["spcs", "compute-pool", "deploy"])

    assert deploy_result.exit_code == 1, deploy_result.output
    assert "No compute pool project definition found in" in deploy_result.output


def test_deploy_not_existing_entity_id(runner, project_directory):
    """An entity id absent from the project definition is a usage error (exit 2)."""
    missing_id = "not-existing-entity-id"

    with project_directory("spcs_compute_pool"):
        result = runner.invoke(["spcs", "compute-pool", "deploy", missing_id])

    assert result.exit_code == 2, result.output
    assert f"No '{missing_id}' entity in project definition file." in result.output


@patch(EXECUTE_QUERY)
def test_deploy_multiple_compute_pools_with_entity_id(
    mock_execute_query, runner, project_directory
):
    """With several pools defined, an explicit entity id selects which to deploy."""
    with project_directory("spcs_multiple_compute_pools"):
        result = runner.invoke(["spcs", "compute-pool", "deploy", "test_compute_pool"])

    assert result.exit_code == 0, result.output
    assert "Compute pool 'test_compute_pool' successfully deployed." in result.output

    expected_query = (
        "CREATE COMPUTE POOL test_compute_pool\n"
        "MIN_NODES = 1\n"
        "MAX_NODES = 2\n"
        "INSTANCE_FAMILY = CPU_X64_XS\n"
        "AUTO_RESUME = True\n"
        "INITIALLY_SUSPENDED = True\n"
        "AUTO_SUSPEND_SECS = 60"
    )
    mock_execute_query.assert_called_once_with(expected_query)


def test_deploy_multiple_compute_pools(runner, project_directory):
    """Omitting the entity id is ambiguous when several pools are defined."""
    with project_directory("spcs_multiple_compute_pools"):
        ambiguous_result = runner.invoke(["spcs", "compute-pool", "deploy"])

    assert ambiguous_result.exit_code == 2, ambiguous_result.output
    assert (
        "Multiple compute pools found. Please provide entity id for the operation."
        in ambiguous_result.output
    )


@patch(EXECUTE_QUERY)
def test_stop(mock_execute_query):
pool_name = "test_pool"
cursor = Mock(spec=SnowflakeCursor)
@@ -209,9 +391,7 @@ def test_stop(mock_execute_query):
assert result == cursor


@patch(
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)
@patch(EXECUTE_QUERY)
def test_suspend(mock_execute_query):
pool_name = "test_pool"
cursor = Mock(spec=SnowflakeCursor)
@@ -246,9 +426,7 @@ def test_suspend_cli(mock_suspend, mock_cursor, runner):
assert result_json_parsed == {"status": "Statement executed successfully."}


@patch(
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)
@patch(EXECUTE_QUERY)
def test_resume(mock_execute_query):
pool_name = "test_pool"
cursor = Mock(spec=SnowflakeCursor)
@@ -300,9 +478,7 @@ def test_compute_pool_name_callback_invalid(mock_is_valid):
assert "is not a valid compute pool name." in e.value.message


@patch(
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)
@patch(EXECUTE_QUERY)
def test_set_property(mock_execute_query):
pool_name = "test_pool"
min_nodes = 2
@@ -401,9 +577,7 @@ def test_set_property_no_properties_cli(mock_set, runner):
)


@patch(
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)
@patch(EXECUTE_QUERY)
def test_unset_property(mock_execute_query):
pool_name = "test_pool"
cursor = Mock(spec=SnowflakeCursor)
@@ -476,9 +650,7 @@ def test_unset_property_with_args(runner):
assert "Got unexpected extra argument" in result.output


@patch(
"snowflake.cli._plugins.spcs.compute_pool.manager.ComputePoolManager.execute_query"
)
@patch(EXECUTE_QUERY)
def test_status(mock_execute_query):
pool_name = "test_pool"
cursor = Mock(spec=SnowflakeCursor)
13 changes: 13 additions & 0 deletions tests/test_data/projects/spcs_compute_pool/snowflake.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
definition_version: "2"

entities:
test_compute_pool:
type: compute-pool
identifier:
name: test_compute_pool
min_nodes: 1
max_nodes: 2
instance_family: CPU_X64_XS
auto_resume: true
initially_suspended: true
auto_suspend_seconds: 60
33 changes: 33 additions & 0 deletions tests/test_data/projects/spcs_multiple_compute_pools/snowflake.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
definition_version: "2"

entities:
test_compute_pool:
type: compute-pool
identifier:
name: test_compute_pool
min_nodes: 1
max_nodes: 2
instance_family: CPU_X64_XS
auto_resume: true
initially_suspended: true
auto_suspend_seconds: 60
test_compute_pool_2:
type: compute-pool
identifier:
name: test_compute_pool_2
min_nodes: 1
max_nodes: 2
instance_family: CPU_X64_XS
auto_resume: true
initially_suspended: true
auto_suspend_seconds: 60
test_compute_pool_3:
type: compute-pool
identifier:
      name: test_compute_pool_3
min_nodes: 1
max_nodes: 2
instance_family: CPU_X64_XS
auto_resume: true
initially_suspended: true
auto_suspend_seconds: 60
36 changes: 36 additions & 0 deletions tests_integration/spcs/test_compute_pool.py
Original file line number Diff line number Diff line change
@@ -40,6 +40,42 @@ def test_compute_pool(_test_steps: Tuple[ComputePoolTestSteps, str]):
test_steps.list_should_not_return_compute_pool(compute_pool_name)


@pytest.mark.integration
def test_compute_pool_deploy(
    _test_steps: Tuple[ComputePoolTestSteps, str],
    project_directory,
    alter_snowflake_yml,
):
    """End-to-end check of `snow spcs compute-pool deploy`.

    Rewrites the fixture project definition to use a uniquely-named pool,
    deploys it, verifies a second plain deploy fails and that `--replace`
    succeeds, then drops the pool and confirms it is gone.
    """
    test_steps, compute_pool_name = _test_steps

    with project_directory("spcs_compute_pool"):
        # Point the project definition at the uniquely-named test pool.
        alter_snowflake_yml(
            "snowflake.yml",
            "entities",
            {
                compute_pool_name: {
                    "type": "compute-pool",
                    "identifier": {
                        "name": compute_pool_name,
                    },
                    "min_nodes": 1,
                    "max_nodes": 1,
                    "instance_family": "CPU_X64_XS",
                    "auto_resume": True,
                    "initially_suspended": True,
                    "auto_suspend_seconds": 60,
                }
            },
        )
        test_steps.deploy_compute_pool(compute_pool_name)
        test_steps.list_should_return_compute_pool(compute_pool_name)
        # Re-deploying without --replace must fail: the pool already exists.
        test_steps.second_deploy_should_fail()
        test_steps.deploy_compute_pool_with_replace(compute_pool_name)
        # Cleanup: drop the pool and confirm it no longer appears in listings.
        test_steps.drop_compute_pool(compute_pool_name)
        test_steps.list_should_not_return_compute_pool(compute_pool_name)


@pytest.fixture
def _test_setup(runner, snowflake_session):
compute_pool_test_setup = ComputePoolTestSetup(
30 changes: 30 additions & 0 deletions tests_integration/spcs/testing_utils/compute_pool_utils.py
Original file line number Diff line number Diff line change
@@ -18,12 +18,14 @@
import pytest
from snowflake.connector import SnowflakeConnection

from snowflake.cli.api.output.types import CommandResult
from tests_integration.conftest import SnowCLIRunner
from tests_integration.test_utils import contains_row_with, not_contains_row_with
from tests_integration.testing_utils.assertions.test_result_assertions import (
assert_that_result_is_successful_and_executed_successfully,
assert_that_result_is_successful_and_output_json_contains,
assert_that_result_is_successful_and_output_json_equals,
assert_that_result_failed_with_message_containing,
)


@@ -63,6 +65,34 @@ def create_compute_pool(self, compute_pool_name: str) -> None:
in result.json["status"] # type: ignore
)

def deploy_compute_pool(self, compute_pool_name: str) -> None:
result = self._deploy_compute_pool(replace=False)
assert_that_result_is_successful_and_output_json_contains(
result,
{"message": f"Compute pool '{compute_pool_name}' successfully deployed."},
)

def deploy_compute_pool_with_replace(self, compute_pool_name: str) -> None:
result = self._deploy_compute_pool(replace=True)
assert_that_result_is_successful_and_output_json_contains(
result,
{"message": f"Compute pool '{compute_pool_name}' successfully deployed."},
)

def second_deploy_should_fail(self) -> None:
result = self._deploy_compute_pool(replace=False)
assert_that_result_failed_with_message_containing(result, "already exists")

def _deploy_compute_pool(self, replace: bool) -> CommandResult:
params = [
"spcs",
"compute-pool",
"deploy",
]
if replace:
params.append("--replace")
return self._setup.runner.invoke_with_connection_json(params)

def list_should_return_compute_pool(self, compute_pool_name) -> None:
result = self._execute_list()
assert_that_result_is_successful_and_output_json_contains(
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
definition_version: "2"

entities:
test_compute_pool:
type: compute-pool
identifier:
name: test_compute_pool
min_nodes: 1
max_nodes: 1
instance_family: CPU_X64_XS
auto_resume: true
initially_suspended: true
auto_suspend_seconds: 60

0 comments on commit cd0ff29

Please sign in to comment.