Commit b433445

Merge branch 'main' into python3.13
2 parents: 999f01b and 0ff54bb

26 files changed: 92 additions and 91 deletions

.github/workflows/contributed-recipes.yml (+1, -1)

@@ -42,7 +42,7 @@ jobs:
     env:
       REPOSITORY_OWNER: ${{ github.repository_owner }}

-  test-recipes:
+  build:
     runs-on: ${{ matrix.runs-on }}
     timeout-minutes: 5
     needs: generate-matrix

.github/workflows/docker-tag-push.yml (+1, -1)

@@ -18,7 +18,7 @@ on:
        type: string
      timeout-minutes:
        description: Timeout in minutes
-       default: 15
+       default: 20
        type: number
  secrets:
    REGISTRY_USERNAME:

.github/workflows/docker.yml (+27, -29)

@@ -15,7 +15,6 @@ on:
       # We use local reusable workflows to make architecture clean and simple
       # https://docs.github.com/en/actions/sharing-automations/reusing-workflows
       - ".github/workflows/docker-build-test-upload.yml"
-      - ".github/workflows/docker-merge-tags.yml"
       - ".github/workflows/docker-tag-push.yml"
       - ".github/workflows/docker-wiki-update.yml"

@@ -39,7 +38,6 @@ on:
     paths:
       - ".github/workflows/docker.yml"
       - ".github/workflows/docker-build-test-upload.yml"
-      - ".github/workflows/docker-merge-tags.yml"
       - ".github/workflows/docker-tag-push.yml"
       - ".github/workflows/docker-wiki-update.yml"

@@ -90,7 +88,7 @@ jobs:
      platform: aarch64
      runs-on: ubuntu-24.04-arm
      timeout-minutes: 15
-    needs: [aarch64-foundation]
+    needs: aarch64-foundation

  x86_64-base:
    uses: ./.github/workflows/docker-build-test-upload.yml

@@ -100,7 +98,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 15
-    needs: [x86_64-foundation]
+    needs: x86_64-foundation

  aarch64-minimal:
    uses: ./.github/workflows/docker-build-test-upload.yml

@@ -110,7 +108,7 @@ jobs:
      platform: aarch64
      runs-on: ubuntu-24.04-arm
      timeout-minutes: 15
-    needs: [aarch64-base]
+    needs: aarch64-base
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-minimal:

@@ -121,7 +119,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 15
-    needs: [x86_64-base]
+    needs: x86_64-base
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  aarch64-scipy:

@@ -132,7 +130,7 @@ jobs:
      platform: aarch64
      runs-on: ubuntu-24.04-arm
      timeout-minutes: 15
-    needs: [aarch64-minimal]
+    needs: aarch64-minimal
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-scipy:

@@ -143,7 +141,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 15
-    needs: [x86_64-minimal]
+    needs: x86_64-minimal
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  aarch64-r:

@@ -154,7 +152,7 @@ jobs:
      platform: aarch64
      runs-on: ubuntu-24.04-arm
      timeout-minutes: 15
-    needs: [aarch64-minimal]
+    needs: aarch64-minimal
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-r:

@@ -165,7 +163,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 15
-    needs: [x86_64-minimal]
+    needs: x86_64-minimal
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  aarch64-julia:

@@ -177,7 +175,7 @@ jobs:
      runs-on: ubuntu-24.04-arm
      # This workflow sometimes takes quite long to build
      timeout-minutes: 30
-    needs: [aarch64-minimal]
+    needs: aarch64-minimal
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-julia:

@@ -188,7 +186,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 20
-    needs: [x86_64-minimal]
+    needs: x86_64-minimal
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  aarch64-tensorflow:

@@ -199,7 +197,7 @@ jobs:
      platform: aarch64
      runs-on: ubuntu-24.04-arm
      timeout-minutes: 15
-    needs: [aarch64-scipy]
+    needs: aarch64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-tensorflow:

@@ -210,7 +208,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 15
-    needs: [x86_64-scipy]
+    needs: x86_64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-tensorflow-cuda:

@@ -222,7 +220,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 20
-    needs: [x86_64-scipy]
+    needs: x86_64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  aarch64-pytorch:

@@ -233,7 +231,7 @@ jobs:
      platform: aarch64
      runs-on: ubuntu-24.04-arm
      timeout-minutes: 20
-    needs: [aarch64-scipy]
+    needs: aarch64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-pytorch:

@@ -244,7 +242,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 20
-    needs: [x86_64-scipy]
+    needs: x86_64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-pytorch-cuda11:

@@ -256,7 +254,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 20
-    needs: [x86_64-scipy]
+    needs: x86_64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-pytorch-cuda12:

@@ -268,7 +266,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 20
-    needs: [x86_64-scipy]
+    needs: x86_64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  aarch64-datascience:

@@ -280,7 +278,7 @@ jobs:
      runs-on: ubuntu-24.04-arm
      # This workflow sometimes takes quite long to build
      timeout-minutes: 30
-    needs: [aarch64-scipy]
+    needs: aarch64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-datascience:

@@ -291,7 +289,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 25
-    needs: [x86_64-scipy]
+    needs: x86_64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  aarch64-pyspark:

@@ -302,7 +300,7 @@ jobs:
      platform: aarch64
      runs-on: ubuntu-24.04-arm
      timeout-minutes: 20
-    needs: [aarch64-scipy]
+    needs: aarch64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-pyspark:

@@ -313,7 +311,7 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 15
-    needs: [x86_64-scipy]
+    needs: x86_64-scipy
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  aarch64-all-spark:

@@ -324,7 +322,7 @@ jobs:
      platform: aarch64
      runs-on: ubuntu-24.04-arm
      timeout-minutes: 20
-    needs: [aarch64-pyspark]
+    needs: aarch64-pyspark
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

  x86_64-all-spark:

@@ -335,10 +333,10 @@ jobs:
      platform: x86_64
      runs-on: ubuntu-24.04
      timeout-minutes: 15
-    needs: [x86_64-pyspark]
+    needs: x86_64-pyspark
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

-  images-tag-push:
+  tag-push:
    uses: ./.github/workflows/docker-tag-push.yml
    with:
      image: ${{ matrix.image }}

@@ -401,7 +399,7 @@ jobs:
        ]
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}

-  images-tag-push-fast:
+  tag-push-fast:
    uses: ./.github/workflows/docker-tag-push.yml
    with:
      image: ${{ matrix.image }}

@@ -418,14 +416,14 @@ jobs:

  wiki-update:
    uses: ./.github/workflows/docker-wiki-update.yml
-    needs: [images-tag-push]
+    needs: tag-push
    if: ${{ !contains(github.event.pull_request.title, '[FAST_BUILD]') }}
    permissions:
      contents: write

  wiki-update-fast:
    uses: ./.github/workflows/docker-wiki-update.yml
-    needs: [images-tag-push-fast]
+    needs: tag-push-fast
    if: contains(github.event.pull_request.title, '[FAST_BUILD]')

  contributed-recipes:

.github/workflows/sphinx.yml (+1, -1)

@@ -11,7 +11,7 @@ on:

      - "docs/**"

-      # Thse files are also rendered as docs pages
+      # These files are also rendered as docs pages
      - "README.md"
      - "CHANGELOG.md"

docs/using/custom-images.md (+3, -4)

@@ -19,9 +19,8 @@ Our repository provides several customization points:
 - `PYTHON_VERSION` (docker argument) - the Python version to install in `docker-stacks-foundation` image
 - `REGISTRY`, `OWNER`, `BASE_IMAGE` (docker arguments) - they allow to specify parent image for all the other images
 - `REGISTRY`, `OWNER` (part of `env` in some GitHub workflows) - these allow to properly tag and refer to images during following steps:
-  [`build-test-upload`](https://github.com/jupyter/docker-stacks/blob/main/.github/workflows/docker-build-test-upload.yml),
-  [`tag-push`](https://github.com/jupyter/docker-stacks/blob/main/.github/workflows/docker-tag-push.yml) and
-  [`merge-tags`](https://github.com/jupyter/docker-stacks/blob/main/.github/workflows/docker-merge-tags.yml)
+  - [`build-test-upload`](https://github.com/jupyter/docker-stacks/blob/main/.github/workflows/docker-build-test-upload.yml)
+  - [`tag-push`](https://github.com/jupyter/docker-stacks/blob/main/.github/workflows/docker-tag-push.yml)

 These customization points can't be changed during runtime.
 Read more about [Docker build arguments](https://docs.docker.com/build/building/variables/#arg-usage-example) and [GitHub environment variables for a single workflow](https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#defining-environment-variables-for-a-single-workflow).

@@ -57,7 +56,7 @@ FROM $BASE_IMAGE

 Include the file below in your project:

-```{literalinclude} recipe_code/docker-bake.python312.hcl
+```{literalinclude} recipe_code/docker-bake.custom-python.hcl
 :force:
 :language: hcl
 :caption: docker-bake.hcl
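
The customization points described in the hunk above are build-time Docker arguments. As a minimal sketch (not part of this commit) of what passing `PYTHON_VERSION` looks like without the bake file, the remote build context below mirrors the one used in `docker-bake.custom-python.hcl`, while the image tag is hypothetical and `docker` is assumed to be on `PATH`:

```python
# Minimal sketch, not part of this commit: pass PYTHON_VERSION as a Docker build
# argument, mirroring what docker-bake.custom-python.hcl does declaratively.
# The tag name "my-docker-stacks-foundation" is hypothetical.
import subprocess

subprocess.run(
    [
        "docker", "build",
        "--build-arg", "PYTHON_VERSION=3.13",
        "--tag", "my-docker-stacks-foundation",
        "https://github.com/jupyter/docker-stacks.git#main:images/docker-stacks-foundation",
    ],
    check=True,
)
```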

docs/using/recipe_code/custom_environment.dockerfile (+2, -2)

@@ -2,8 +2,8 @@ ARG BASE_IMAGE=quay.io/jupyter/minimal-notebook
 FROM $BASE_IMAGE

 # Name your environment and choose the Python version
-ARG env_name=python310
-ARG py_ver=3.10
+ARG env_name=python313
+ARG py_ver=3.13

 # You can add additional libraries here
 RUN mamba create --yes -p "${CONDA_DIR}/envs/${env_name}" \

docs/using/recipe_code/docker-bake.python312.hcl renamed to docs/using/recipe_code/docker-bake.custom-python.hcl (+1, -1)

@@ -5,7 +5,7 @@ group "default" {
 target "foundation" {
   context = "https://github.com/jupyter/docker-stacks.git#main:images/docker-stacks-foundation"
   args = {
-    PYTHON_VERSION = "3.12"
+    PYTHON_VERSION = "3.13"
   }
   tags = ["docker-stacks-foundation"]
 }

@@ -1,7 +1,7 @@
 ARG BASE_IMAGE=quay.io/jupyter/base-notebook
 FROM $BASE_IMAGE

-RUN mamba install --yes 'jupyterhub-singleuser==4.0.1' && \
+RUN mamba install --yes 'jupyterhub-singleuser==5.2.1' && \
     mamba clean --all -f -y && \
     fix-permissions "${CONDA_DIR}" && \
     fix-permissions "/home/${NB_USER}"
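
This recipe pins `jupyterhub-singleuser` so the single-user server in the image matches the Hub it connects to. A minimal verification sketch (not part of this commit), assuming the install exposes Python distribution metadata under the name `jupyterhub`:

```python
# Minimal sketch, not part of this commit: run inside the built image to confirm
# the single-user server matches the Hub release you deploy against.
# Assumes distribution metadata is exposed as "jupyterhub"; adjust if it is not.
from importlib.metadata import version

expected = "5.2.1"
installed = version("jupyterhub")
assert installed == expected, f"expected jupyterhub {expected}, got {installed}"
print(f"jupyterhub-singleuser pin looks consistent: {installed}")
```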

images/pyspark-notebook/setup_spark.py (+1)

@@ -55,6 +55,7 @@ def version_array(ver: str) -> tuple[int, int, int, str]:


 def download_spark(
+    *,
     spark_version: str,
     hadoop_version: str,
     scala_version: str,
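
The only change here is a bare `*`, which makes every parameter of `download_spark` keyword-only, so the three look-alike version strings can no longer be swapped by position. A minimal sketch of the effect (illustrative body and version values, not the real `setup_spark.py` logic); the keyword-argument call sites added in `tagging/apps/apply_tags.py` and `tagging/apps/merge_tags.py` further down follow the same motivation:

```python
# Minimal sketch: a bare "*" rejects positional arguments, so callers must name
# each version explicitly. The body and version values are illustrative only.
def download_spark(*, spark_version: str, hadoop_version: str, scala_version: str) -> str:
    suffix = f"-scala{scala_version}" if scala_version else ""
    return f"spark-{spark_version}-bin-hadoop{hadoop_version}{suffix}"


print(download_spark(spark_version="4.0.0", hadoop_version="3", scala_version=""))

try:
    download_spark("4.0.0", "3", "")  # positional call is now a TypeError
except TypeError as err:
    print(f"rejected as intended: {err}")
```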

requirements-dev.txt (+1)

@@ -2,6 +2,7 @@ docker
 plumbum
 pre-commit
 pytest
+pytest-rerunfailures
 # `pytest-xdist` is a plugin that provides the `--numprocesses` flag,
 # allowing us to run `pytest` tests in parallel
 pytest-xdist
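
`pytest-rerunfailures` lets flaky tests (for example, ones hitting registries or networks) be retried before they are reported as failures. A minimal sketch of typical usage, not a test from this repository:

```python
# Minimal sketch, not a test from this repository: with pytest-rerunfailures
# installed, a flaky test can be retried before it is reported as failed.
import random

import pytest


@pytest.mark.flaky(reruns=3, reruns_delay=2)
def test_sometimes_fails() -> None:
    assert random.random() > 0.25  # deliberately flaky for illustration
```

The same plugin also accepts `pytest --reruns 3` on the command line to retry every failing test in a run.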

tagging/apps/apply_tags.py (+3, -1)

@@ -17,7 +17,9 @@
 def apply_tags(config: Config) -> None:
     LOGGER.info(f"Tagging image: {config.image}")

-    file_prefix = get_file_prefix_for_platform(config.platform, config.variant)
+    file_prefix = get_file_prefix_for_platform(
+        platform=config.platform, variant=config.variant
+    )
     filename = f"{file_prefix}-{config.image}.txt"
     tags = (config.tags_dir / filename).read_text().splitlines()

tagging/apps/merge_tags.py (+4, -2)

@@ -26,7 +26,9 @@ def read_local_tags_from_files(config: Config) -> tuple[list[str], set[str]]:
     for platform in ALL_PLATFORMS:
         LOGGER.info(f"Reading tags for platform: {platform}")

-        file_prefix = get_file_prefix_for_platform(platform, config.variant)
+        file_prefix = get_file_prefix_for_platform(
+            platform=platform, variant=config.variant
+        )
         filename = f"{file_prefix}-{config.image}.txt"
         path = config.tags_dir / filename
         if not path.exists():

@@ -83,7 +85,7 @@ def pull_missing_tags(merged_tag: str, all_local_tags: list[str]) -> list[str]:

 def push_manifest(merged_tag: str, existing_platform_tags: list[str]) -> None:
     LOGGER.info(f"Creating manifest for tag: {merged_tag}")
-    # Unforunately, `docker manifest create` requires images to have been already pushed to the registry
+    # Unfortunately, `docker manifest create` requires images to have been already pushed to the registry
     # which is not true for new tags in PRs
     run_with_retries(
         lambda: docker["manifest", "create", merged_tag][existing_platform_tags]
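
The retried `docker manifest create` call above uses plumbum's bracket syntax (plumbum is already listed in `requirements-dev.txt`): `docker[...]` binds arguments without running anything. A minimal standalone sketch of that invocation pattern, with illustrative registry tags and without the project's `run_with_retries` helper:

```python
# Minimal sketch of the plumbum invocation style used above; tags are illustrative
# and nothing is pushed. Bracket-binding builds a command without executing it.
from plumbum import local

docker = local["docker"]

merged_tag = "quay.io/example/minimal-notebook:latest"
platform_tags = ["quay.io/example/minimal-notebook:x86_64-latest"]

cmd = docker["manifest", "create", merged_tag][platform_tags]  # bound, not yet run
print(cmd)  # shows the full command line that would be executed
# cmd()     # calling it would actually run `docker manifest create ...`
```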
