Skip to content

Commit df7e9f6

Browse files
authored
Update CI/CD JFrog integration (#2385)
## Changes ### What does this PR do? This PR addresses some issues that were preventing the normal pull-request CI/CD jobs from working properly: - The jobs were missing permissions that are needed to access JFrog. - The project setup needed some changes to the way the dependencies for build backend (`hatchling`) are resolved. - A workaround is also put in place to deal with a packaging error with `databricks-bb-analyzer`: depending on the install order of packages, this error prevents the import of `databricks.*` and `databricks.labs.*` modules from other packages. Acceptance tests still fail, because we can't set up a Spark service at the moment and some tests depend on this. ### Relevant implementation details The build backend for a project does not fall within uv's normal locking/pinning (`uv.lock`) mechanism, which means uv always attempts to resolve dependencies. When doing so, it will apply the configured cooldown period: 7 days. Unfortunately JFrog does not provide timestamp information for PyPI artefacts, and uv therefore disqualifies everything during resolution. There are two ways of dealing with this: 1) drop the cooldown configuration; 2) exempt those dependencies from the cooldown configuration. Of these, this PR implements the latter so that the rest of the project dependencies are handled properly when locking the dependencies. (The mirrors we're using _also_ apply the cooldown period, but for this sort of thing it's best if both apply it just in case there's a problem with one of them.) ### Caveats/things to watch out for when reviewing: The scope of this PR is addressing issues in the `build` check (`push.yml`): other workflows and jobs still fail but are not in scope.
1 parent aeb17d5 commit df7e9f6

4 files changed

Lines changed: 42 additions & 2 deletions

File tree

.github/workflows/acceptance.yml

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ jobs:
2020
integration:
2121
environment: tool
2222
permissions:
23-
# Access to the integration testing infrastructure.
23+
# Access to JFrog and the integration testing infrastructure.
2424
id-token: write
2525
# Write test results to the PR.
2626
pull-requests: write
@@ -48,7 +48,10 @@ jobs:
4848
$GITHUB_WORKSPACE/.github/scripts/setup_mssql_odbc.sh
4949
5050
# TODO: Migrate tests to use Databricks clusters instead of Spark local mode
51+
# Disabled for now, because access to archives.apache.org is blocked; a new approach will be needed for this.
52+
# (The integration tests will still run, but many will fail due to this.)
5153
- name: Setup spark
54+
if: false
5255
run: |
5356
chmod +x $GITHUB_WORKSPACE/.github/scripts/setup_spark_remote.sh
5457
$GITHUB_WORKSPACE/.github/scripts/setup_spark_remote.sh

.github/workflows/push.yml

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,9 @@ jobs:
2222
runs-on:
2323
group: databrickslabs-protected-runner-group
2424
labels: linux-ubuntu-latest
25+
permissions:
26+
# JFrog OIDC authentication.
27+
id-token: write
2528
steps:
2629
- name: Checkout
2730
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -35,6 +38,9 @@ jobs:
3538
- name: Setup for JFrog
3639
uses: ./.github/actions/jfrog-auth
3740

41+
- name: Pre-sync environment to work around analyzer bug
42+
run: make dev
43+
3844
- name: Run unit tests
3945
run: make test
4046

@@ -48,6 +54,9 @@ jobs:
4854
runs-on:
4955
group: databrickslabs-protected-runner-group
5056
labels: linux-ubuntu-latest
57+
permissions:
58+
# JFrog OIDC authentication.
59+
id-token: write
5160
steps:
5261
- name: Checkout
5362
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
@@ -95,6 +104,9 @@ jobs:
95104
runs-on:
96105
group: databrickslabs-protected-runner-group
97106
labels: linux-ubuntu-latest
107+
permissions:
108+
# JFrog OIDC authentication.
109+
id-token: write
98110
env:
99111
INPUT_DIR_PARENT: .
100112
OUTPUT_DIR: ./test-reports

Makefile

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,16 @@ clean: docs-clean
1414

1515
dev:
1616
uv sync --all-extras
17+
# Workaround: databricks-bb-analyzer is missing databricks/__init__.py in its wheel.
18+
# If it's installed last the namespace package breaks. Ensure the file exists.
19+
@for f in .venv/lib/python*/site-packages/databricks/__init__.py \
20+
.venv/lib/python*/site-packages/databricks/labs/__init__.py; \
21+
do \
22+
grep -q 'extend_path' "$$f" 2>/dev/null || { \
23+
printf '__path__ = __import__("pkgutil").extend_path(__path__, __name__)\n' > "$$f"; \
24+
printf 'Warning: workaround needed (and configured) for analyzer packaging bug: %s\n' "$$f"; \
25+
} \
26+
done
1727

1828
lint:
1929
$(UV_RUN) black --check .

pyproject.toml

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,9 @@ Issues = "https://github.com/databrickslabs/lakebridge/issues"
5252
Source = "https://github.com/databrickslabs/lakebridge"
5353

5454
[build-system]
55-
requires = ["hatchling"]
55+
requires = [
56+
"hatchling~=1.29.0"
57+
]
5658
build-backend = "hatchling.build"
5759

5860
[tool.hatch.build]
@@ -123,6 +125,19 @@ databricks-labs-pytester = false
123125
databricks-sdk = false
124126
databricks-switch-plugin = false
125127

128+
# These packages are here because the backend (hatchling) depends on them, but backend dependencies are not covered
129+
# by uv.lock. To load the project these must always be resolved, including under CI/CD where JFrog is used. However
130+
# JFrog does not provide timestamp metadata, which leads to uv excluding them from resolution. The only way to deal
131+
# with this currently is to exempt them from the 'exclude-newer' policy: the only alternative would be to not have
132+
# them at all.
133+
editables = false
134+
hatchling = false
135+
packaging = false
136+
pathspec = false
137+
pluggy = false
138+
tomli = false
139+
trove-classifiers = false
140+
126141
[tool.pytest.ini_options]
127142
addopts = "-s -p no:warnings -vv --cache-clear"
128143
cache_dir = ".venv/pytest-cache"

0 commit comments

Comments (0)