@@ -31,14 +31,16 @@ RUN uv pip uninstall --system google-cloud-bigquery-storage
 # b/394382016: sigstore (dependency of kagglehub) requires a prerelease packages, installing separate.
 RUN uv pip install --system --force-reinstall --prerelease=allow kagglehub[pandas-datasets,hf-datasets,signing]>=0.3.9
 
+# google-cloud-automl 2.0.0 introduced incompatible API changes, need to pin to 1.0.1
+# Keras 3.6 broke test_keras.py > test_train > keras.datasets.mnist.load_data():
+# See https://github.com/keras-team/keras/commit/dcefb139863505d166dd1325066f329b3033d45a
+RUN uv pip install --system google-cloud-automl==1.0.1 google-cloud-aiplatform google-cloud-translate==3.12.1 \
+    google-cloud-videointelligence google-cloud-vision google-genai "keras<3.6"
+
 # uv cannot install this in requirements.txt without --no-build-isolation
 # to avoid affecting the larger build, we'll post-install it.
 RUN uv pip install --no-build-isolation --system "git+https://github.com/Kaggle/learntools"
 
-# b/385161357 Latest Colab uses tf 2.17.1, but tf decision forests only has a version for 2.17.0.
-# Instead, we'll install tfdf with its deps and hope that 2.17.0 compat tfdf works with tf 2.17.1.
-RUN uv pip install --system --no-deps tensorflow-decision-forests==1.10.0 wurlitzer==3.1.1 ydf==0.9.0
-
 # b/385145217 Latest Colab lacks mkl numpy, install it.
 RUN uv pip install --system --force-reinstall -i https://pypi.anaconda.org/intel/simple numpy
 
@@ -52,8 +54,8 @@ ADD clean-layer.sh /tmp/clean-layer.sh
 ADD patches/nbconvert-extensions.tpl /opt/kaggle/nbconvert-extensions.tpl
 ADD patches/template_conf.json /opt/kaggle/conf.json
 
-# /opt/conda/lib/python3.10/site-packages
-ARG PACKAGE_PATH=/usr/local/lib/python3.10/dist-packages
+# /opt/conda/lib/python3.11/site-packages
+ARG PACKAGE_PATH=/usr/local/lib/python3.11/dist-packages
 
 # Install GPU-specific non-pip packages.
 {{ if eq .Accelerator "gpu" }}
@@ -108,6 +110,7 @@ RUN apt-get install -y libfreetype6-dev && \
     apt-get install -y libglib2.0-0 libxext6 libsm6 libxrender1 libfontconfig1 --fix-missing
 
 # NLTK Project datasets
+RUN uv pip install --system --force-reinstall "nltk>=3.9.1"
 RUN mkdir -p /usr/share/nltk_data && \
     # NLTK Downloader no longer continues smoothly after an error, so we explicitly list
     # the corpuses that work
@@ -120,7 +123,7 @@ RUN mkdir -p /usr/share/nltk_data && \
     masc_tagged maxent_ne_chunker maxent_treebank_pos_tagger moses_sample movie_reviews \
     mte_teip5 names nps_chat omw opinion_lexicon paradigms \
     pil pl196x porter_test ppattach problem_reports product_reviews_1 product_reviews_2 propbank \
-    pros_cons ptb punkt qc reuters rslp rte sample_grammars semcor senseval sentence_polarity \
+    pros_cons ptb punkt punkt_tab qc reuters rslp rte sample_grammars semcor senseval sentence_polarity \
     sentiwordnet shakespeare sinica_treebank smultron snowball_data spanish_grammars \
     state_union stopwords subjectivity swadesh switchboard tagsets timit toolbox treebank \
     twitter_samples udhr2 udhr unicode_samples universal_tagset universal_treebanks_v20 \
@@ -198,7 +201,7 @@ ADD patches/kaggle_gcp.py \
 
 # Figure out why this is in a different place?
 # Found by doing a export PYTHONVERBOSE=1 and then running python and checking for where it looked for it.
-ADD patches/sitecustomize.py /usr/lib/python3.10/sitecustomize.py
+ADD patches/sitecustomize.py /usr/lib/python3.11/sitecustomize.py
 
 ARG GIT_COMMIT=unknown \
     BUILD_DATE=unknown
0 commit comments