-
Notifications
You must be signed in to change notification settings - Fork 70
188 lines (178 loc) · 6.65 KB
/
test.yml
File metadata and controls
188 lines (178 loc) · 6.65 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
name: Test Python package
on:
  push:
    branches:
      - main
  pull_request:
# When this workflow is queued, automatically cancel any previous running
# or pending jobs from the same branch
concurrency:
  # github.head_ref is only populated for pull_request events; on push it is
  # empty, which previously collapsed all pushes to main into one shared group
  # and cancelled in-flight main builds. Fall back to the unique run id so
  # cancellation only applies to superseded PR runs.
  group: test-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
# Required shell entrypoint to have properly activated conda environments
defaults:
  run:
    shell: bash -l {0}
jobs:
  # Scans the triggering commit message for the "[test-upstream]" phrase and
  # publishes the result as the `triggered` output for downstream jobs.
  detect-upstream-trigger:
    name: Check for upstream trigger phrase to test with upstream dask
    runs-on: ubuntu-latest
    # Only run in the canonical repo, not in forks
    if: github.repository == 'dask-contrib/dask-sql'
    outputs:
      triggered: ${{ steps.detect-upstream-trigger.outputs.trigger-found }}
    steps:
      - uses: actions/checkout@v3
        with:
          # NOTE(review): presumably ci-trigger needs one extra commit of
          # history to read the pushed commit's message — confirm against
          # xarray-contrib/ci-trigger docs
          fetch-depth: 2
      - uses: xarray-contrib/ci-trigger@v1.2
        id: detect-upstream-trigger
        with:
          keyword: "[test-upstream]"
  # Same as detect-upstream-trigger, but for the "[test-df-upstream]" phrase,
  # which opts a run into building against upstream Arrow DataFusion.
  detect-datafusion-upstream-trigger:
    name: Check for upstream trigger phrase to test with upstream datafusion
    runs-on: ubuntu-latest
    # Only run in the canonical repo, not in forks
    if: github.repository == 'dask-contrib/dask-sql'
    outputs:
      triggered: ${{ steps.detect-datafusion-upstream-trigger.outputs.trigger-found }}
    steps:
      - uses: actions/checkout@v3
        with:
          # NOTE(review): presumably ci-trigger needs one extra commit of
          # history to read the pushed commit's message — confirm against
          # xarray-contrib/ci-trigger docs
          fetch-depth: 2
      - uses: xarray-contrib/ci-trigger@v1.2
        id: detect-datafusion-upstream-trigger
        with:
          keyword: "[test-df-upstream]"
test:
name: "Build & Test (${{ matrix.os }}, python: ${{ matrix.python }})"
needs: [detect-upstream-trigger, detect-datafusion-upstream-trigger]
runs-on: ${{ matrix.os }}
env:
CONDA_FILE: continuous_integration/environment-${{ matrix.python }}-dev.yaml
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest, macos-latest]
python: ["3.8", "3.9", "3.10"]
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: conda-incubator/setup-miniconda@v2.2.0
with:
miniforge-variant: Mambaforge
use-mamba: true
python-version: ${{ matrix.python }}
channel-priority: strict
activate-environment: dask-sql
environment-file: ${{ env.CONDA_FILE }}
- name: Optionally update upstream datafusion & cargo dependencies
if: needs.detect-datafusion-upstream-trigger.outputs.triggered == 'true'
run: |
cd dask_planner
bash update-dependencies.sh
- name: Build the Rust DataFusion bindings
run: |
python setup.py build install
- name: Install hive testing dependencies
if: matrix.os == 'ubuntu-latest'
run: |
mamba install -c conda-forge "sasl>=0.3.1"
docker pull bde2020/hive:2.3.2-postgresql-metastore
docker pull bde2020/hive-metastore-postgresql:2.3.0
- name: Optionally install upstream dev Dask
if: needs.detect-upstream-trigger.outputs.triggered == 'true'
run: |
mamba install --no-channel-priority dask/label/dev::dask
- name: Test with pytest
run: |
pytest --junitxml=junit/test-results.xml --cov-report=xml -n auto tests --dist loadfile
- name: Upload pytest test results
if: always()
uses: actions/upload-artifact@v1
with:
name: pytest-results
path: junit/test-results.xml
- name: Upload coverage to Codecov
if: github.repository == 'dask-contrib/dask-sql'
uses: codecov/codecov-action@v3
  # Runs the test suite against an independent, docker-compose-managed
  # dask scheduler/worker pair instead of an in-process cluster.
  cluster:
    name: "Test in a dask cluster"
    needs: [detect-upstream-trigger, detect-datafusion-upstream-trigger]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: conda-incubator/setup-miniconda@v2.2.0
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          python-version: "3.9"
          channel-priority: strict
          activate-environment: dask-sql
          environment-file: continuous_integration/environment-3.9-dev.yaml
      - name: Optionally update upstream datafusion & cargo dependencies
        if: needs.detect-datafusion-upstream-trigger.outputs.triggered == 'true'
        run: |
          cd dask_planner
          bash update-dependencies.sh
      - name: Build the Rust DataFusion bindings
        run: |
          python setup.py build install
      - name: Install dependencies
        # python-blosc/lz4 are installed here; the which/list commands just
        # record the environment in the job log for debugging
        run: |
          mamba install python-blosc lz4 -c conda-forge
          which python
          pip list
          mamba list
      - name: Optionally install upstream dev Dask
        if: needs.detect-upstream-trigger.outputs.triggered == 'true'
        run: |
          mamba install --no-channel-priority dask/label/dev::dask
      - name: run a dask cluster
        env:
          # String "true"/"false" from the trigger-detection job output
          UPSTREAM: ${{ needs.detect-upstream-trigger.outputs.triggered }}
        # Picks the compose file matching the Dask version under test, then
        # blocks until the worker has connected to the scheduler.
        # NOTE(review): relies on the docker-compose v1 CLI being present on
        # the runner — confirm, or migrate to `docker compose` when convenient
        run: |
          if [[ $UPSTREAM == "true" ]]; then
            docker-compose -f .github/cluster-upstream.yml up -d
          else
            docker-compose -f .github/cluster.yml up -d
          fi

          # periodically ping logs until a connection has been established; assume failure after 2 minutes
          timeout 2m bash -c 'until docker logs dask-worker 2>&1 | grep -q "Starting established connection"; do sleep 1; done'

          docker logs dask-scheduler
          docker logs dask-worker
      - name: Test with pytest while running an independent dask cluster
        # DASK_SQL_TEST_SCHEDULER points the tests at the compose-managed scheduler
        run: |
          DASK_SQL_TEST_SCHEDULER="tcp://127.0.0.1:8786" pytest tests
  # Smoke test: install dask-sql with only its bare pip requirements (no conda
  # dev environment) and verify that the package imports.
  import:
    name: "Test importing with bare requirements"
    needs: [detect-upstream-trigger, detect-datafusion-upstream-trigger]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python
        uses: conda-incubator/setup-miniconda@v2.2.0
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          python-version: "3.8"
          channel-priority: strict
      - name: Optionally update upstream datafusion & cargo dependencies
        if: needs.detect-datafusion-upstream-trigger.outputs.triggered == 'true'
        run: |
          cd dask_planner
          bash update-dependencies.sh
      - name: Install dependencies and nothing else
        # setuptools-rust is needed to build the Rust extension during
        # `pip install -e .`; the trailing commands log the environment
        run: |
          mamba install setuptools-rust
          pip install -e . -vv

          which python
          pip list
          mamba list
      - name: Optionally install upstream dev Dask
        if: needs.detect-upstream-trigger.outputs.triggered == 'true'
        # --no-deps so the git installs replace only dask/distributed themselves
        run: |
          python -m pip install --no-deps git+https://github.com/dask/dask
          python -m pip install --no-deps git+https://github.com/dask/distributed
      - name: Try to import dask-sql
        run: |
          python -c "import dask_sql; print('ok')"