Fix GitHub workflow lint runner #218

Merged: 4 commits, Jan 10, 2025
31 changes: 25 additions & 6 deletions .github/workflows/lint.yaml
@@ -2,7 +2,8 @@ name: Semantic Model Format & Lint

on:
pull_request:
- "*"
branches:
- "*"

jobs:
build:
@@ -20,39 +21,57 @@ jobs:
with:
python-version: ${{ matrix.python-version }}

# Caching dependencies using Poetry
- name: Cache Poetry virtualenv
uses: actions/cache@v2
with:
path: ~/.cache/pypoetry/virtualenvs
key: ${{ runner.os }}-poetry-${{ hashFiles('**/poetry.lock') }}
key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
restore-keys: |
${{ runner.os }}-poetry-

- name: Install Poetry
run: |
curl -sSL https://install.python-poetry.org | python3 -
python3 -m pip install --user pipx
python3 -m pipx ensurepath
pipx install poetry

- name: Configure Poetry
run: |
$HOME/.local/bin/poetry config virtualenvs.create false
export PATH="$HOME/.local/bin:$PATH"
poetry config virtualenvs.create false

- name: Install dependencies using Poetry
run: |
$HOME/.local/bin/poetry install --no-interaction
poetry install --no-interaction

- name: Run mypy
id: mypy
run: |
make run_mypy
continue-on-error: true

- name: Check with black
id: black
run: |
make check_black
continue-on-error: true

- name: Check with isort
id: isort
run: |
make check_isort
continue-on-error: true

- name: Run flake8
id: flake8
run: |
make run_flake8
continue-on-error: true

- name: Report failures
run: |
if [ "${{ steps.black.outcome }}" != "success" ]; then echo "black failed"; FAIL=1; fi
if [ "${{ steps.isort.outcome }}" != "success" ]; then echo "isort failed"; FAIL=1; fi
if [ "${{ steps.flake8.outcome }}" != "success" ]; then echo "flake8 failed"; FAIL=1; fi
if [ "$FAIL" == "1" ]; then exit 1; fi
continue-on-error: false
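
The workflow change above is the core of this PR: the pull_request trigger gets a valid branches filter, Poetry is installed through pipx rather than the install script and invoked via PATH instead of an absolute path, and each linter runs with continue-on-error: true so every tool reports before the final step decides the job's outcome. Note that the Report failures step aggregates black, isort, and flake8 but not mypy, so a mypy failure alone would not fail the job. A minimal sketch of the same aggregate-then-fail flow run locally, assuming the Makefile targets shown in this PR:

# Sketch: install tooling, then run all four linters and fail at the end if any failed.
python3 -m pip install --user pipx
python3 -m pipx ensurepath
pipx install poetry
poetry config virtualenvs.create false
poetry install --no-interaction
FAIL=0
for target in run_mypy check_black check_isort run_flake8; do
  make "$target" || { echo "$target failed"; FAIL=1; }
done
exit "$FAIL"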
1 change: 1 addition & 0 deletions .gitignore
@@ -21,6 +21,7 @@ pyvenv
.vscode/settings.json
.vscode/.ropeproject
.vscode/*.log
.vscode/*.json

# Jetbrains
.idea/*
10 changes: 5 additions & 5 deletions Makefile
@@ -38,21 +38,21 @@ run_mypy: ## Run mypy
mypy --config-file=mypy.ini .

run_flake8: ## Run flake8
flake8 --ignore=E203,E501,W503 --exclude=venv,pyvenv,tmp,*_pb2.py,*_pb2.pyi,images/*/src .
flake8 --ignore=E203,E501,W503 --exclude=venv,.venv,pyvenv,tmp,*_pb2.py,*_pb2.pyi,images/*/src .

check_black: ## Check to see if files would be updated with black.
# Exclude pyvenv and all generated protobuf code.
black --check --exclude="venv|pyvenv|.*_pb2.py|.*_pb2.pyi" .
black --check --exclude=".venv|venv|pyvenv|.*_pb2.py|.*_pb2.pyi" .

run_black: ## Run black to format files.
# Exclude pyvenv, tmp, and all generated protobuf code.
black --exclude="venv|pyvenv|tmp|.*_pb2.py|.*_pb2.pyi" .
black --exclude=".venv|venv|pyvenv|tmp|.*_pb2.py|.*_pb2.pyi" .

check_isort: ## Check if files would be updated with isort.
isort --profile black --check --skip=venv --skip=pyvenv --skip-glob='*_pb2.py*' .
isort --profile black --check --skip=venv --skip=pyvenv --skip=.venv --skip-glob='*_pb2.py*' .

run_isort: ## Run isort to update imports.
isort --profile black --skip=pyvenv --skip=venv --skip=tmp --skip-glob='*_pb2.py*' .
isort --profile black --skip=pyvenv --skip=venv --skip=tmp --skip=.venv --skip-glob='*_pb2.py*' .


fmt_lint: shell ## lint/fmt in current python environment
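Each linting target above adds a .venv exclusion so a local virtual environment is never linted. A quick way to sanity-check the exclusions, assuming an otherwise clean tree (the file name below is hypothetical):

# Sketch: plant a deliberately unformatted file in .venv; the checks should still pass.
mkdir -p .venv
printf 'import sys,os\nx=1\n' > .venv/unformatted.py
make check_black check_isort run_flake8 && echo ".venv is ignored"
rm .venv/unformatted.py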
8 changes: 4 additions & 4 deletions app.py
@@ -8,12 +8,12 @@
from app_utils.shared_utils import ( # noqa: E402
GeneratorAppScreen,
get_snowflake_connection,
set_sit_query_tag,
set_account_name,
set_host_name,
set_user_name,
set_streamlit_location,
set_sit_query_tag,
set_snowpark_session,
set_streamlit_location,
set_user_name,
)
from semantic_model_generator.snowflake_utils.env_vars import ( # noqa: E402
SNOWFLAKE_ACCOUNT_LOCATOR,
@@ -28,7 +28,7 @@ def failed_connection_popup() -> None:
Renders a dialog box detailing that the credentials provided could not be used to connect to Snowflake.
"""
st.markdown(
f"""It looks like the credentials provided could not be used to connect to the account."""
"""It looks like the credentials provided could not be used to connect to the account."""
)
st.stop()

4 changes: 2 additions & 2 deletions app_utils/chat.py
@@ -1,6 +1,6 @@
import json
import re
from typing import Dict, Any
from typing import Any, Dict

import requests
import streamlit as st
@@ -32,7 +32,7 @@ def send_message(

resp = _snowflake.send_snow_api_request( # type: ignore
"POST",
f"/api/v2/cortex/analyst/message",
"/api/v2/cortex/analyst/message",
{},
{},
request_body,
28 changes: 11 additions & 17 deletions app_utils/shared_utils.py
@@ -4,17 +4,16 @@
import os
import time
from dataclasses import dataclass
from datetime import datetime
from enum import Enum
from io import StringIO
from typing import Any, Optional, List, Union
from typing import Any, List, Optional, Union

import pandas as pd
import streamlit as st
from snowflake.snowpark import Session
from PIL import Image
from snowflake.connector import ProgrammingError
from snowflake.connector.connection import SnowflakeConnection
from snowflake.snowpark import Session

from semantic_model_generator.data_processing.proto_utils import (
proto_to_yaml,
@@ -26,23 +25,19 @@
)
from semantic_model_generator.protos import semantic_model_pb2
from semantic_model_generator.protos.semantic_model_pb2 import Dimension, Table
from semantic_model_generator.snowflake_utils.env_vars import ( # noqa: E402
assert_required_env_vars,
)
from semantic_model_generator.snowflake_utils.snowflake_connector import (
SnowflakeConnector,
fetch_databases,
fetch_schemas_in_database,
fetch_stages_in_schema,
fetch_tables_views_in_schema,
fetch_warehouses,
fetch_stages_in_schema,
fetch_yaml_names_in_stage,
)

from semantic_model_generator.snowflake_utils.env_vars import ( # noqa: E402
SNOWFLAKE_ACCOUNT_LOCATOR,
SNOWFLAKE_HOST,
SNOWFLAKE_USER,
assert_required_env_vars,
)

SNOWFLAKE_ACCOUNT = os.environ.get("SNOWFLAKE_ACCOUNT_LOCATOR", "")

# Add a logo on the top-left corner of the app
@@ -103,6 +98,7 @@ def get_snowflake_connection() -> SnowflakeConnection:
if st.session_state["sis"]:
# Import SiS-required modules
import sys

from snowflake.snowpark.context import get_active_session

# Non-Anaconda supported packages must be added to path to import from stage
@@ -994,7 +990,7 @@ def upload_yaml(file_name: str) -> None:
with tempfile.TemporaryDirectory() as temp_dir:
tmp_file_path = os.path.join(temp_dir, f"{file_name}.yaml")

with open(tmp_file_path, "w", encoding='utf-8') as temp_file:
with open(tmp_file_path, "w", encoding="utf-8") as temp_file:
temp_file.write(yaml)

st.session_state.session.file.put(
@@ -1052,12 +1048,10 @@ def download_yaml(file_name: str, stage_name: str) -> str:

with tempfile.TemporaryDirectory() as temp_dir:
# Downloads the YAML to {temp_dir}/{file_name}.
st.session_state.session.file.get(
f"@{stage_name}/{file_name}", temp_dir
)
st.session_state.session.file.get(f"@{stage_name}/{file_name}", temp_dir)

tmp_file_path = os.path.join(temp_dir, f"{file_name}")
with open(tmp_file_path, "r", encoding='utf-8') as temp_file:
with open(tmp_file_path, "r", encoding="utf-8") as temp_file:
# Read the raw contents from {temp_dir}/{file_name} and return it as a string.
yaml_str = temp_file.read()
return yaml_str
@@ -1263,7 +1257,7 @@ def model(self) -> Optional[str]:
return st.session_state.semantic_model.name # type: ignore
return None

def to_dict(self) -> dict[str, Union[str,None]]:
def to_dict(self) -> dict[str, Union[str, None]]:
return {
"User": self.user,
"Stage": self.stage,
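The hunks in this file, and most of the Python changes in the rest of the PR, are mechanical: isort alphabetizes imports and merges duplicate import blocks, while black normalizes quotes and line wrapping. They can be regenerated with the repo's own targets:

# Sketch: apply the same import ordering and formatting fixes in-place.
make run_isort run_black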
16 changes: 8 additions & 8 deletions journeys/iteration.py
@@ -1,8 +1,3 @@
from streamlit import config
# Set minCachedMessageSize to 500 MB to disable forward message cache:
# st.set_config would trigger an error, only the set_config from config module works
config.set_option("global.minCachedMessageSize", 500 * 1e6)

import json
import time
from typing import Any, Dict, List, Optional
@@ -11,6 +6,7 @@
import sqlglot
import streamlit as st
from snowflake.connector import ProgrammingError, SnowflakeConnection
from streamlit import config
from streamlit.delta_generator import DeltaGenerator
from streamlit_extras.row import row
from streamlit_extras.stylable_container import stylable_container
@@ -43,6 +39,10 @@
from semantic_model_generator.protos import semantic_model_pb2
from semantic_model_generator.validate_model import validate

# Set minCachedMessageSize to 500 MB to disable forward message cache:
# st.set_config would trigger an error, only the set_config from config module works
config.set_option("global.minCachedMessageSize", 500 * 1e6)


def get_file_name() -> str:
return st.session_state.file_name # type: ignore
@@ -179,9 +179,9 @@ def edit_verified_query(
st.session_state["successful_sql"] = True

except Exception as e:
st.session_state[
"error_state"
] = f"Edited SQL not compatible with semantic model provided, please double check: {e}"
st.session_state["error_state"] = (
f"Edited SQL not compatible with semantic model provided, please double check: {e}"
)

if st.session_state["error_state"] is not None:
st.error(st.session_state["error_state"])
2 changes: 1 addition & 1 deletion mypy.ini
@@ -7,7 +7,7 @@ disallow_untyped_defs = True
warn_unused_ignores = False
disallow_any_generics = True

exclude = venv|pyvenv|(_test\.py|test_.*\.py)|_pb2\.py|_pb2\.pyi|admin_app/streamlit_app.py
exclude = .venv|venv|pyvenv|(_test\.py|test_.*\.py)|_pb2\.py|_pb2\.pyi|admin_app/streamlit_app.py

[mypy-semantic_model_generator.protos.semantic_model_pb2]
ignore_errors = True
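Worth noting: mypy's exclude is a single regular expression, so the unescaped .venv alternative actually matches any character followed by venv (\.venv would be stricter), though it still skips the intended directory. The exclusion also applies only to file discovery, not to explicitly named paths:

# Sketch: exclude applies when mypy discovers files, not when a path is passed directly.
mypy --config-file=mypy.ini .                # .venv is skipped during discovery
mypy --config-file=mypy.ini .venv/foo.py     # hypothetical path; still checked when named explicitly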
12 changes: 6 additions & 6 deletions partner/dbt.py
@@ -6,12 +6,11 @@
from snowflake.connector import ProgrammingError

from app_utils.shared_utils import (
download_yaml,
get_snowflake_connection,
get_yamls_from_stage,
set_sit_query_tag,
stage_selector_container,
get_yamls_from_stage,
download_yaml,
SnowflakeStage,
)

# Partner semantic support instructions
@@ -69,8 +68,9 @@ def upload_dbt_semantic() -> None:
stage_files = st.multiselect("Staged files", options=available_files)
if stage_files:
for staged_file in stage_files:
file_content = download_yaml(staged_file,
st.session_state["selected_iteration_stage"])
file_content = download_yaml(
staged_file, st.session_state["selected_iteration_stage"]
)
uploaded_files.append(file_content)
else:
uploaded_files = st.file_uploader( # type: ignore
@@ -80,7 +80,7 @@
key="dbt_files",
)
if uploaded_files:
partner_semantic: list[Union[None,DBTSemanticModel]] = []
partner_semantic: list[Union[None, DBTSemanticModel]] = []
for file in uploaded_files:
partner_semantic.extend(read_dbt_yaml(file)) # type: ignore

14 changes: 7 additions & 7 deletions partner/looker.py
@@ -6,12 +6,6 @@
from loguru import logger
from snowflake.connector import ProgrammingError, SnowflakeConnection

from partner.cortex import (
CortexDimension,
CortexMeasure,
CortexSemanticTable,
CortexTimeDimension,
)
from app_utils.shared_utils import (
GeneratorAppScreen,
check_valid_session_state_values,
@@ -27,6 +21,12 @@
set_sit_query_tag,
set_table_comment,
)
from partner.cortex import (
CortexDimension,
CortexMeasure,
CortexSemanticTable,
CortexTimeDimension,
)
from semantic_model_generator.data_processing.proto_utils import proto_to_dict

try:
@@ -523,7 +523,7 @@ def render_looker_explore_as_table(
target_lag: Optional[int] = 20,
target_lag_unit: Optional[str] = "minutes",
warehouse: Optional[str] = None,
) -> Union[None,dict[str, dict[str, str]]]:
) -> Union[None, dict[str, dict[str, str]]]:
"""
Creates materialized table corresponding to Looker Explore.
Args:
8 changes: 3 additions & 5 deletions partner/partner_utils.py
@@ -8,13 +8,13 @@
import streamlit as st
import yaml

from partner.cortex import CortexSemanticTable
from partner.dbt import DBTSemanticModel, upload_dbt_semantic
from app_utils.shared_utils import (
get_snowflake_connection,
render_image,
set_sit_query_tag,
)
from partner.cortex import CortexSemanticTable
from partner.dbt import DBTSemanticModel, upload_dbt_semantic
from semantic_model_generator.data_processing.proto_utils import yaml_to_semantic_model


@@ -306,9 +306,7 @@ def integrate_partner_semantics() -> None:
index=0,
help=COMPARE_SEMANTICS_HELP,
)
orphan_label, orphan_col1, orphan_col2 = st.columns(
3, gap="small"
)
orphan_label, orphan_col1, orphan_col2 = st.columns(3, gap="small")
with orphan_label:
st.write("Retain unmatched fields:")
with orphan_col1:
2 changes: 1 addition & 1 deletion semantic_model_generator/tests/validate_model_test.py
@@ -1,5 +1,5 @@
import json
from unittest.mock import patch, MagicMock
from unittest.mock import MagicMock, patch

from snowflake.connector import SnowflakeConnection
