Merge branch 'databricks:main' into main
pivoshenko authored Jan 12, 2025
2 parents a0dfb51 + cbae014 commit b8300eb
Showing 19 changed files with 854 additions and 111 deletions.
2 changes: 1 addition & 1 deletion .codegen/_openapi_sha
@@ -1 +1 @@
-a6a317df8327c9b1e5cb59a03a42ffa2aabeef6d
+779817ed8d63031f5ea761fbd25ee84f38feec0d
9 changes: 6 additions & 3 deletions .github/workflows/external-message.yml
@@ -13,7 +13,10 @@ on:

jobs:
  comment-on-pr:
-   runs-on: ubuntu-latest
+   runs-on:
+     group: databricks-deco-testing-runner-group
+     labels: ubuntu-latest-deco

    permissions:
      pull-requests: write

@@ -44,13 +47,13 @@ jobs:
          gh pr comment ${{ github.event.pull_request.number }} --body \
          "<!-- INTEGRATION_TESTS_MANUAL -->
          If integration tests don't run automatically, an authorized user can run them manually by following the instructions below:
          Trigger:
          [go/deco-tests-run/sdk-py](https://go/deco-tests-run/sdk-py)
          Inputs:
          * PR number: ${{github.event.pull_request.number}}
          * Commit SHA: \`${{ env.COMMIT_SHA }}\`
          Checks will be approved automatically on success.
          "
34 changes: 23 additions & 11 deletions .github/workflows/integration-tests.yml
@@ -6,12 +6,16 @@ on:
    types: [opened, synchronize]

  merge_group:

jobs:
  check-token:
    name: Check secrets access
-   runs-on: ubuntu-latest
+   runs-on:
+     group: databricks-deco-testing-runner-group
+     labels: ubuntu-latest-deco

    environment: "test-trigger-is"
    outputs:
      has_token: ${{ steps.set-token-status.outputs.has_token }}
@@ -26,14 +30,18 @@ jobs:
echo "DECO_WORKFLOW_TRIGGER_APP_ID is set. User has access to secrets."
echo "::set-output name=has_token::true"
fi
trigger-tests:
name: Trigger Tests
runs-on: ubuntu-latest

runs-on:
group: databricks-deco-testing-runner-group
labels: ubuntu-latest-deco

needs: check-token
if: github.event_name == 'pull_request' && needs.check-token.outputs.has_token == 'true'
environment: "test-trigger-is"

steps:
- uses: actions/checkout@v3

@@ -45,26 +53,30 @@ jobs:
          private-key: ${{ secrets.DECO_WORKFLOW_TRIGGER_PRIVATE_KEY }}
          owner: ${{ secrets.ORG_NAME }}
          repositories: ${{secrets.REPO_NAME}}

      - name: Trigger Workflow in Another Repo
        env:
          GH_TOKEN: ${{ steps.generate-token.outputs.token }}
        run: |
          gh workflow run sdk-py-isolated-pr.yml -R ${{ secrets.ORG_NAME }}/${{secrets.REPO_NAME}} \
            --ref main \
            -f pull_request_number=${{ github.event.pull_request.number }} \
            -f commit_sha=${{ github.event.pull_request.head.sha }}
  # Statuses and checks apply to specific commits (by hash).
  # Enforcement of required checks is done both at the PR level and the merge queue level.
  # In case of multiple commits in a single PR, the hash of the squashed commit
  # will not match the one for the latest (approved) commit in the PR.
  # We auto approve the check for the merge queue for two reasons:
  # * Queue times out due to duration of tests.
  # * Avoid running integration tests twice, since they were already run at the tip of the branch before squashing.
  auto-approve:
    if: github.event_name == 'merge_group'
-   runs-on: ubuntu-latest
+   runs-on:
+     group: databricks-deco-testing-runner-group
+     labels: ubuntu-latest-deco

    steps:
      - name: Mark Check
        env:
@@ -75,4 +87,4 @@ jobs:
-H "X-GitHub-Api-Version: 2022-11-28" \
/repos/${{ github.repository }}/statuses/${{ github.sha }} \
-f 'state=success' \
-f 'context=Integration Tests Check'
-f 'context=Integration Tests Check'
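
For intuition: the auto-approve step above simply posts a success status against the merge-group head commit, which satisfies the required check without re-running the tests. A rough Python equivalent of that `gh api` call, assuming a token with permission to write commit statuses and the standard Actions environment variables:

```python
import os

import requests

# Rough equivalent of the `gh api --method POST` call in the auto-approve job.
resp = requests.post(
    f"https://api.github.com/repos/{os.environ['GITHUB_REPOSITORY']}"
    f"/statuses/{os.environ['GITHUB_SHA']}",
    headers={
        "Authorization": f"Bearer {os.environ['GITHUB_TOKEN']}",
        "X-GitHub-Api-Version": "2022-11-28",
    },
    # Statuses are keyed by (commit SHA, context), matching the comment above.
    json={"state": "success", "context": "Integration Tests Check"},
)
resp.raise_for_status()
```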
7 changes: 6 additions & 1 deletion .github/workflows/release-test.yml
@@ -5,10 +5,15 @@ on:

jobs:
  publish:
-   runs-on: ubuntu-latest
+   runs-on:
+     group: databricks-deco-testing-runner-group
+     labels: ubuntu-latest-deco

    environment: release-test

+   permissions:
+     id-token: write

    steps:
      - uses: actions/checkout@v3

7 changes: 6 additions & 1 deletion .github/workflows/release.yml
@@ -7,11 +7,16 @@ on:

jobs:
  publish:
-   runs-on: ubuntu-latest
+   runs-on:
+     group: databricks-deco-testing-runner-group
+     labels: ubuntu-latest-deco

    environment: release

    permissions:
      contents: write
+     id-token: write

    steps:
      - uses: actions/checkout@v3

13 changes: 11 additions & 2 deletions databricks/sdk/__init__.py

Some generated files are not rendered by default.

19 changes: 16 additions & 3 deletions databricks/sdk/_base_client.py
@@ -1,6 +1,7 @@
import io
import logging
import urllib.parse
+from abc import ABC, abstractmethod
from datetime import timedelta
from types import TracebackType
from typing import (Any, BinaryIO, Callable, Dict, Iterable, Iterator, List,
@@ -285,8 +286,20 @@ def _record_request_log(self, response: requests.Response, raw: bool = False) ->
    logger.debug(RoundTrip(response, self._debug_headers, self._debug_truncate_bytes, raw).generate())


+class _RawResponse(ABC):
+
+    @abstractmethod
+    # follows Response signature: https://github.com/psf/requests/blob/main/src/requests/models.py#L799
+    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
+        pass
+
+    @abstractmethod
+    def close(self):
+        pass
+
+
class _StreamingResponse(BinaryIO):
-   _response: requests.Response
+   _response: _RawResponse
    _buffer: bytes
    _content: Union[Iterator[bytes], None]
    _chunk_size: Union[int, None]
@@ -298,7 +311,7 @@ def fileno(self) -> int:
    def flush(self) -> int:
        pass

-   def __init__(self, response: requests.Response, chunk_size: Union[int, None] = None):
+   def __init__(self, response: _RawResponse, chunk_size: Union[int, None] = None):
        self._response = response
        self._buffer = b''
        self._content = None
@@ -308,7 +321,7 @@ def _open(self) -> None:
        if self._closed:
            raise ValueError("I/O operation on closed file")
        if not self._content:
-           self._content = self._response.iter_content(chunk_size=self._chunk_size)
+           self._content = self._response.iter_content(chunk_size=self._chunk_size, decode_unicode=False)

    def __enter__(self) -> BinaryIO:
        self._open()
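
The new `_RawResponse` ABC decouples `_StreamingResponse` from `requests.Response`: anything exposing `iter_content` and `close` can now back a streaming body. A minimal sketch of that seam, assuming `_StreamingResponse.read` follows the usual `BinaryIO` contract; `_FakeRawResponse` and its canned chunks are illustrative, not part of the SDK:

```python
from databricks.sdk._base_client import _RawResponse, _StreamingResponse


class _FakeRawResponse(_RawResponse):
    """Hypothetical in-memory stand-in for a live requests.Response."""

    def __init__(self, chunks):
        self._chunks = chunks
        self.closed = False

    def iter_content(self, chunk_size: int = 1, decode_unicode: bool = False):
        # Serve the canned chunks; a real response would stream from the socket.
        return iter(self._chunks)

    def close(self):
        self.closed = True


raw = _FakeRawResponse([b"hello ", b"world"])
with _StreamingResponse(raw) as stream:
    assert stream.read() == b"hello world"
```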
5 changes: 5 additions & 0 deletions databricks/sdk/config.py
@@ -92,6 +92,11 @@ class Config:
    max_connections_per_pool: int = ConfigAttribute()
    databricks_environment: Optional[DatabricksEnvironment] = None

+   enable_experimental_files_api_client: bool = ConfigAttribute(
+       env='DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT')
+   files_api_client_download_max_total_recovers = None
+   files_api_client_download_max_total_recovers_without_progressing = 1

    def __init__(
        self,
        *,
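
The new `enable_experimental_files_api_client` flag gates the experimental Files API client, while the two `files_api_client_download_max_total_recovers*` attributes appear to bound how many times an interrupted download may be recovered, in total and without forward progress respectively. A minimal sketch of opting in; the host and token below are placeholders, not real credentials:

```python
import os

from databricks.sdk.config import Config

# Opt in via the environment variable declared on the attribute above...
os.environ['DATABRICKS_ENABLE_EXPERIMENTAL_FILES_API_CLIENT'] = 'true'

# ...or pass the attribute directly (placeholder host and token).
cfg = Config(host='https://example.cloud.databricks.com',
             token='dapi-example-token',
             enable_experimental_files_api_client=True)
print(cfg.enable_experimental_files_api_client)  # True
```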
2 changes: 1 addition & 1 deletion databricks/sdk/data_plane.py
@@ -3,7 +3,6 @@
from typing import Callable, List

from databricks.sdk.oauth import Token
-from databricks.sdk.service.oauth2 import DataPlaneInfo


@dataclass
@@ -19,6 +18,7 @@ class DataPlaneDetails:

class DataPlaneService:
    """Helper class to fetch and manage DataPlane details."""
+   from .service.serving import DataPlaneInfo

    def __init__(self):
        self._data_plane_info = {}
